% --- Web-viewer chrome accidentally captured with this .bbl; commented out
% --- so it is not typeset when the bibliography is \input by the document.
% Files
% internal-docs/LaTeX2e+Proceedings+Templates+download/main.bbl
%
% 268 lines
% 13 KiB
% Plaintext
% Raw Blame History
%
% This file contains ambiguous Unicode characters
% This file contains Unicode characters that might be confused with other characters. If you think that this is intentional, you can safely ignore this warning. Use the Escape button to reveal them.
\begin{thebibliography}{10}
\providecommand{\url}[1]{\texttt{#1}}
\providecommand{\urlprefix}{URL }
\providecommand{\doi}[1]{https://doi.org/#1}
\bibitem{10.1145/3055366.3055375}
Ahmed, C.M., Palleti, V.R., Mathur, A.P.: WADI: a water distribution testbed
for research in the design of secure cyber physical systems. In: Proceedings
of the 3rd International Workshop on Cyber-Physical Systems for Smart Water
Networks. pp. 25--28. CySWATER '17, Association for Computing Machinery, New
York, NY, USA (2017)
\bibitem{info16100910}
Ali, J., Ali, S., Al~Balushi, T., Nadir, Z.: Intrusion detection in industrial
control systems using transfer learning guided by reinforcement learning.
Information \textbf{16}(10) (2025)
\bibitem{austin2021structured}
Austin, J., Johnson, D.D., Ho, J., Tarlow, D., van~den Berg, R.: Structured
denoising diffusion models in discrete state-spaces. In: Ranzato, M.,
Beygelzimer, A., Dauphin, Y., Liang, P., Vaughan, J.W. (eds.) Advances in
Neural Information Processing Systems. vol.~34, pp. 17981--17993. Curran
Associates, Inc. (2021)
\bibitem{coletta2023constrained}
Coletta, A., Gopalakrishnan, S., Borrajo, D., Vyetrenko, S.: On the constrained
time-series generation problem. In: Oh, A., Naumann, T., Globerson, A.,
Saenko, K., Hardt, M., Levine, S. (eds.) Advances in Neural Information
Processing Systems. vol.~36, pp. 61048--61059. Curran Associates, Inc. (2023)
\bibitem{dai2019transformerxlattentivelanguagemodels}
Dai, Z., Yang, Z., Yang, Y., Carbonell, J., Le, Q., Salakhutdinov, R.:
Transformer-{XL}: Attentive language models beyond a fixed-length context.
In: Korhonen, A., Traum, D., M{\`a}rquez, L. (eds.) Proceedings of the 57th
Annual Meeting of the Association for Computational Linguistics. pp.
2978--2988. Association for Computational Linguistics, Florence, Italy (Jul
2019)
\bibitem{godefroid2017learnfuzzmachinelearninginput}
Godefroid, P., Peleg, H., Singh, R.: Learn\&Fuzz: Machine learning for input
fuzzing. In: 2017 32nd IEEE/ACM International Conference on Automated
Software Engineering (ASE). pp. 50--59 (2017)
\bibitem{hang2023efficient}
Hang, T., Gu, S., Li, C., Bao, J., Chen, D., Hu, H., Geng, X., Guo, B.:
Efficient diffusion training via Min-SNR weighting strategy. In: Proceedings
of the IEEE/CVF International Conference on Computer Vision (ICCV). pp.
7441--7451 (October 2023)
\bibitem{ho2020denoising}
Ho, J., Jain, A., Abbeel, P.: Denoising diffusion probabilistic models. In:
Larochelle, H., Ranzato, M., Hadsell, R., Balcan, M., Lin, H. (eds.) Advances
in Neural Information Processing Systems. vol.~33, pp. 6840--6851. Curran
Associates, Inc. (2020)
\bibitem{hoogeboom2021argmaxflowsmultinomialdiffusion}
Hoogeboom, E., Nielsen, D., Jaini, P., Forr\'{e}, P., Welling, M.: Argmax flows
and multinomial diffusion: Learning categorical distributions. In: Ranzato,
M., Beygelzimer, A., Dauphin, Y., Liang, P., Vaughan, J.W. (eds.) Advances in
Neural Information Processing Systems. vol.~34, pp. 12454--12465. Curran
Associates, Inc. (2021)
\bibitem{jiang2023netdiffusionnetworkdataaugmentation}
Jiang, X., Liu, S., Gember-Jacobson, A., Bhagoji, A.N., Schmitt, P., Bronzino,
F., Feamster, N.: NetDiffusion: Network data augmentation through
protocol-constrained traffic generation. Proc. ACM Meas. Anal. Comput. Syst.
\textbf{8}(1) (Feb 2024)
\bibitem{10.1007/s10844-022-00753-1}
Koay, A.M., Ko, R.K.L., Hettema, H., Radke, K.: Machine learning in industrial
control system (ICS) security: current landscape, opportunities and
challenges. Journal of Intelligent Information Systems \textbf{60}(2),
377--405 (2023)
\bibitem{kollovieh2023tsdiff}
Kollovieh, M., Ansari, A.F., Bohlke-Schneider, M., Zschiegner, J., Wang, H.,
Wang, Y.B.: Predict, refine, synthesize: Self-guiding diffusion models for
probabilistic time series forecasting. In: Oh, A., Naumann, T., Globerson,
A., Saenko, K., Hardt, M., Levine, S. (eds.) Advances in Neural Information
Processing Systems. vol.~36, pp. 28341--28364. Curran Associates, Inc. (2023)
\bibitem{kong2021diffwaveversatilediffusionmodel}
Kong, Z., Ping, W., Huang, J., Zhao, K., Catanzaro, B.: DiffWave: A versatile
diffusion model for audio synthesis (2021)
\bibitem{pmlr-v202-kotelnikov23a}
Kotelnikov, A., Baranchuk, D., Rubachev, I., Babenko, A.: {T}ab{DDPM}:
Modelling tabular data with diffusion models. In: Krause, A., Brunskill, E.,
Cho, K., Engelhardt, B., Sabato, S., Scarlett, J. (eds.) Proceedings of the
40th International Conference on Machine Learning. Proceedings of Machine
Learning Research, vol.~202, pp. 17564--17579. PMLR (23--29 Jul 2023)
\bibitem{li2022diffusionlmimprovescontrollabletext}
Li, X., Thickstun, J., Gulrajani, I., Liang, P.S., Hashimoto, T.B.:
Diffusion-LM improves controllable text generation. In: Koyejo, S., Mohamed,
S., Agarwal, A., Belgrave, D., Cho, K., Oh, A. (eds.) Advances in Neural
Information Processing Systems. vol.~35, pp. 4328--4343. Curran Associates,
Inc. (2022)
\bibitem{lin1991divergence}
Lin, J.: Divergence measures based on the Shannon entropy. IEEE Transactions on
Information Theory \textbf{37}(1), 145--151 (1991)
\bibitem{Lin_2020}
Lin, Z., Jain, A., Wang, C., Fanti, G., Sekar, V.: Using GANs for sharing
networked time series data: Challenges, initial promise, and open questions.
In: Proceedings of the ACM Internet Measurement Conference. pp. 464--483. IMC
'20, Association for Computing Machinery, New York, NY, USA (2020)
\bibitem{liu2023pristiconditionaldiffusionframework}
Liu, M., Huang, H., Feng, H., Sun, L., Du, B., Fu, Y.: PriSTI: A conditional
diffusion framework for spatiotemporal imputation. In: 2023 IEEE 39th
International Conference on Data Engineering (ICDE). pp. 1927--1939 (2023)
\bibitem{11087622}
Liu, X., Xu, X., Liu, Z., Li, Z., Wu, K.: Spatio-temporal diffusion model for
cellular traffic generation. IEEE Transactions on Mobile Computing
\textbf{25}(1), 257--271 (2026)
\bibitem{7469060}
Mathur, A.P., Tippenhauer, N.O.: SWaT: a water treatment testbed for research
and training on ICS security. In: 2016 International Workshop on
Cyber-physical Systems for Smart Water Networks (CySWater). pp. 31--36 (2016)
\bibitem{meng2025aflnetyearslatercoverageguided}
Meng, R., Pham, V.T., Böhme, M., Roychoudhury, A.: AFLNet five years later: On
coverage-guided protocol fuzzing. IEEE Transactions on Software Engineering
\textbf{51}(4), 960--974 (2025)
\bibitem{Nankya2023-gp}
Nankya, M., Chataut, R., Akl, R.: Securing industrial control systems:
Components, cyber threats, and machine learning-driven defense strategies.
Sensors \textbf{23}(21) (2023)
\bibitem{nist2023sp80082}
{National Institute of Standards and Technology}: Guide to operational
technology (OT) security. Special Publication 800-82 Rev. 3, NIST (Sep 2023)
\bibitem{nie2023patchtst}
Nie, Y., Nguyen, N.H., Sinthong, P., Kalagnanam, J.: A time series is worth 64
words: Long-term forecasting with transformers. In: International Conference
on Learning Representations (ICLR) (2023)
\bibitem{rasul2021autoregressivedenoisingdiffusionmodels}
Rasul, K., Seward, C., Schuster, I., Vollgraf, R.: Autoregressive denoising
diffusion models for multivariate probabilistic time series forecasting. In:
Meila, M., Zhang, T. (eds.) Proceedings of the 38th International Conference
on Machine Learning. Proceedings of Machine Learning Research, vol.~139, pp.
8857--8868. PMLR (18--24 Jul 2021)
\bibitem{Ring_2019}
Ring, M., Schlör, D., Landes, D., Hotho, A.: Flow-based network traffic
generation using generative adversarial networks. Computers \& Security
\textbf{82}, 156--172 (2019)
\bibitem{sha2026ddpm}
Sha, Y., Yuan, Y., Wu, Y., Zhao, H.: DDPM fusing Mamba and adaptive attention:
An augmentation method for industrial control systems anomaly data (Jan
2026), SSRN Electronic Journal
\bibitem{she2019neuzzefficientfuzzingneural}
She, D., Pei, K., Epstein, D., Yang, J., Ray, B., Jana, S.: NEUZZ: Efficient
fuzzing with neural program smoothing. In: 2019 IEEE Symposium on Security
and Privacy (SP). pp. 803--817 (2019)
\bibitem{shi2024simplified}
Shi, J., Han, K., Wang, Z., Doucet, A., Titsias, M.: Simplified and generalized
masked diffusion for discrete data. In: Globerson, A., Mackey, L., Belgrave,
D., Fan, A., Paquet, U., Tomczak, J., Zhang, C. (eds.) Advances in Neural
Information Processing Systems. vol.~37, pp. 103131--103167. Curran
Associates, Inc. (2024)
\bibitem{shi2025tabdiff}
Shi, J., Xu, M., Hua, H., Zhang, H., Ermon, S., Leskovec, J.: TabDiff: a
mixed-type diffusion model for tabular data generation (2025)
\bibitem{shin}
Shin, H.K., Lee, W., Choi, S., Yun, J.H., Min, B.G., Kim, H.: HAI security
dataset (2023)
\bibitem{sikder2023transfusion}
Sikder, M.F., Ramachandranpillai, R., Heintz, F.: Transfusion: Generating long,
high fidelity time series using diffusion models with transformers. Machine
Learning with Applications \textbf{20}, 100652 (2025)
\bibitem{song2021score}
Song, Y., Sohl-Dickstein, J., Kingma, D.P., Kumar, A., Ermon, S., Poole, B.:
Score-based generative modeling through stochastic differential equations
(2021)
\bibitem{stenger2024survey}
Stenger, M., Leppich, R., Foster, I.T., Kounev, S., Bauer, A.: Evaluation is
key: a survey on evaluation measures for synthetic time series. Journal of
Big Data \textbf{11}(1), ~66 (2024)
\bibitem{tashiro2021csdiconditionalscorebaseddiffusion}
Tashiro, Y., Song, J., Song, Y., Ermon, S.: CSDI: Conditional score-based
diffusion models for probabilistic time series imputation. In: Ranzato, M.,
Beygelzimer, A., Dauphin, Y., Liang, P., Vaughan, J.W. (eds.) Advances in
Neural Information Processing Systems. vol.~34, pp. 24804--24816. Curran
Associates, Inc. (2021)
\bibitem{vaswani2017attention}
Vaswani, A., Shazeer, N., Parmar, N., Uszkoreit, J., Jones, L., Gomez, A.N.,
Kaiser, L.u., Polosukhin, I.: Attention is all you need. In: Guyon, I.,
Luxburg, U.V., Bengio, S., Wallach, H., Fergus, R., Vishwanathan, S.,
Garnett, R. (eds.) Advances in Neural Information Processing Systems.
vol.~30. Curran Associates, Inc. (2017)
\bibitem{10.1145/1151659.1159928}
Vishwanath, K.V., Vahdat, A.: Realistic and responsive network traffic
generation. SIGCOMM Comput. Commun. Rev. \textbf{36}(4), 111--122 (Aug
2006)
\bibitem{wen2024diffstgprobabilisticspatiotemporalgraph}
Wen, H., Lin, Y., Xia, Y., Wan, H., Wen, Q., Zimmermann, R., Liang, Y.:
DiffSTG: Probabilistic spatio-temporal graph forecasting with denoising
diffusion models. In: Proceedings of the 31st ACM International Conference on
Advances in Geographic Information Systems. SIGSPATIAL '23, Association for
Computing Machinery, New York, NY, USA (2023)
\bibitem{wu2022autoformerdecompositiontransformersautocorrelation}
Wu, H., Xu, J., Wang, J., Long, M.: Autoformer: Decomposition transformers with
auto-correlation for long-term series forecasting. In: Ranzato, M.,
Beygelzimer, A., Dauphin, Y., Liang, P., Vaughan, J.W. (eds.) Advances in
Neural Information Processing Systems. vol.~34, pp. 22419--22430. Curran
Associates, Inc. (2021)
\bibitem{yang2001interlock}
Yang, S., Tan, L., He, C.: Automatic verification of safety interlock systems
for industrial processes. Journal of Loss Prevention in the Process
Industries \textbf{14}(5), 379--386 (2001)
\bibitem{10.1145/3544216.3544251}
Yin, Y., Lin, Z., Jin, M., Fanti, G., Sekar, V.: Practical GAN-based synthetic
IP header trace generation using NetShare. In: Proceedings of the ACM SIGCOMM
2022 Conference. pp. 458--472. SIGCOMM '22, Association for Computing
Machinery, New York, NY, USA (2022)
\bibitem{yoon2019timegan}
Yoon, J., Jarrett, D., van~der Schaar, M.: Time-series generative adversarial
networks. In: Wallach, H., Larochelle, H., Beygelzimer, A., d\textquotesingle
Alch\'{e}-Buc, F., Fox, E., Garnett, R. (eds.) Advances in Neural Information
Processing Systems. vol.~32. Curran Associates, Inc. (2019)
\bibitem{yuan2025ctu}
Yuan, Y., Sha, Y., Zhao, H.: CTU-DDPM: Generating industrial control system
time-series data with a CNN-Transformer hybrid diffusion model. In:
Proceedings of the 2025 International Symposium on Artificial Intelligence
and Computational Social Sciences. pp. 547--552. AICSS '25, Association for
Computing Machinery, New York, NY, USA (2025)
\bibitem{zhou2021informerefficienttransformerlong}
Zhou, H., Zhang, S., Peng, J., Zhang, S., Li, J., Xiong, H., Zhang, W.:
Informer: Beyond efficient transformer for long sequence time-series
forecasting. Proceedings of the AAAI Conference on Artificial Intelligence
\textbf{35}(12), 11106--11115 (May 2021)
\bibitem{zhou2022fedformerfrequencyenhanceddecomposed}
Zhou, T., Ma, Z., Wen, Q., Wang, X., Sun, L., Jin, R.: {FED}former: Frequency
enhanced decomposed transformer for long-term series forecasting. In:
Chaudhuri, K., Jegelka, S., Song, L., Szepesvari, C., Niu, G., Sabato, S.
(eds.) Proceedings of the 39th International Conference on Machine Learning.
Proceedings of Machine Learning Research, vol.~162, pp. 27268--27286. PMLR
(17--23 Jul 2022)
\end{thebibliography}