<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE article PUBLIC "-//NLM//DTD JATS (Z39.96) Journal Publishing DTD v1.3 20210610//EN" "JATS-journalpublishing1-3.dtd">
<article article-type="research-article" dtd-version="1.3" xmlns:mml="http://www.w3.org/1998/Math/MathML" xmlns:xlink="http://www.w3.org/1999/xlink" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xml:lang="ru"><front><journal-meta><journal-id journal-id-type="publisher-id">ntv</journal-id><journal-title-group><journal-title xml:lang="ru">Научно-технический вестник информационных технологий, механики и оптики</journal-title><trans-title-group xml:lang="en"><trans-title>Scientific and Technical Journal of Information Technologies, Mechanics and Optics</trans-title></trans-title-group></journal-title-group><issn pub-type="ppub">2226-1494</issn><issn pub-type="epub">2500-0373</issn><publisher><publisher-name>Университет ИТМО</publisher-name></publisher></journal-meta><article-meta><article-id pub-id-type="doi">10.17586/2226-1494-2024-24-5-806-814</article-id><article-id custom-type="elpub" pub-id-type="custom">ntv-151</article-id><article-categories><subj-group subj-group-type="heading"><subject>Research Article</subject></subj-group><subj-group subj-group-type="section-heading" xml:lang="ru"><subject>КОМПЬЮТЕРНЫЕ СИСТЕМЫ И ИНФОРМАЦИОННЫЕ ТЕХНОЛОГИИ</subject></subj-group><subj-group subj-group-type="section-heading" xml:lang="en"><subject>COMPUTER SCIENCE</subject></subj-group></article-categories><title-group><article-title>Сравнительный анализ нейросетевых моделей для картографирования лесных рубок по летним космическим снимкам</article-title><trans-title-group xml:lang="en"><trans-title>Comparative analysis of neural network models for felling mapping in summer satellite imagery</trans-title></trans-title-group></title-group><contrib-group><contrib contrib-type="author" corresp="yes"><contrib-id contrib-id-type="orcid">https://orcid.org/0000-0002-1073-7108</contrib-id><name-alternatives><name name-style="eastern" xml:lang="ru"><surname>Мельников</surname><given-names>А. 
В.</given-names></name><name name-style="western" xml:lang="en"><surname>Melnikov</surname><given-names>A. V.</given-names></name></name-alternatives><bio xml:lang="ru"><p>Мельников Андрей Витальевич - доктор технических наук, профессор, директор; профессор</p><p>Ханты-Мансийск, 628011</p><p>Ханты-Мансийск, 628011</p></bio><bio xml:lang="en"><p>Andrey V. Melnikov - D.Sc., Professor, Director; Professor</p><p>Khanty-Mansiysk, 628011</p><p>Khanty-Mansiysk, 628011</p></bio><email xlink:type="simple">melnikovav@uriit.ru</email><xref ref-type="aff" rid="aff-1"/></contrib><contrib contrib-type="author" corresp="yes"><contrib-id contrib-id-type="orcid">https://orcid.org/0000-0002-4944-4919</contrib-id><name-alternatives><name name-style="eastern" xml:lang="ru"><surname>Полищук</surname><given-names>Ю. М.</given-names></name><name name-style="western" xml:lang="en"><surname>Polishchuk</surname><given-names>Yu. M.</given-names></name></name-alternatives><bio xml:lang="ru"><p>Полищук Юрий Михайлович - доктор физико-математических наук, профессор, главный научный сотрудник</p><p>Ханты-Мансийск, 628011</p></bio><bio xml:lang="en"><p>Yuri M. Polishchuk - D.Sc. (Physics &amp; Mathematics), Professor</p><p>Khanty-Mansiysk, 628011</p></bio><email xlink:type="simple">polichukjm@uriit.ru</email><xref ref-type="aff" rid="aff-2"/></contrib><contrib contrib-type="author" corresp="yes"><contrib-id contrib-id-type="orcid">https://orcid.org/0000-0002-9926-4609</contrib-id><name-alternatives><name name-style="eastern" xml:lang="ru"><surname>Русанов</surname><given-names>М. А.</given-names></name><name name-style="western" xml:lang="en"><surname>Rusanov</surname><given-names>M. A.</given-names></name></name-alternatives><bio xml:lang="ru"><p>Русанов Михаил Александрович - руководитель центра; старший преподаватель</p><p>Ханты-Мансийск, 628011</p><p>Ханты-Мансийск, 628011</p></bio><bio xml:lang="en"><p>Mikhail A. 
Rusanov - Head of the Center</p><p>Khanty-Mansiysk, 628011</p><p>Khanty-Mansiysk, 628011</p></bio><email xlink:type="simple">rusanovma@uriit.ru</email><xref ref-type="aff" rid="aff-1"/></contrib><contrib contrib-type="author" corresp="yes"><contrib-id contrib-id-type="orcid">https://orcid.org/0009-0008-9315-2041</contrib-id><name-alternatives><name name-style="eastern" xml:lang="ru"><surname>Аббазов</surname><given-names>В. Р.</given-names></name><name name-style="western" xml:lang="en"><surname>Abbazov</surname><given-names>V. R.</given-names></name></name-alternatives><bio xml:lang="ru"><p>Аббазов Валерьян Ринатович - ведущий программист</p><p>Ханты-Мансийск, 628011</p></bio><bio xml:lang="en"><p>Valerian R. Abbazov - Leading Software Developer</p><p>Khanty-Mansiysk, 628011</p></bio><email xlink:type="simple">abbazovvr@uriit.ru</email><xref ref-type="aff" rid="aff-2"/></contrib><contrib contrib-type="author" corresp="yes"><contrib-id contrib-id-type="orcid">https://orcid.org/0009-0001-4875-7489</contrib-id><name-alternatives><name name-style="eastern" xml:lang="ru"><surname>Кочергин</surname><given-names>Г. А.</given-names></name><name name-style="western" xml:lang="en"><surname>Kochergin</surname><given-names>G. A.</given-names></name></name-alternatives><bio xml:lang="ru"><p>Кочергин Глеб Александрович - кандидат технических наук, руководитель центра; доцент</p><p>Ханты-Мансийск, 628011</p><p>Ханты-Мансийск, 628011</p></bio><bio xml:lang="en"><p>Gleb A. Kochergin - PhD, Head of the Center; Associate Professor</p><p>Khanty-Mansiysk, 628011</p><p>Khanty-Mansiysk, 628011</p></bio><email xlink:type="simple">kocherginga@uriit.ru</email><xref ref-type="aff" rid="aff-1"/></contrib><contrib contrib-type="author" corresp="yes"><contrib-id contrib-id-type="orcid">https://orcid.org/0000-0002-9476-2887</contrib-id><name-alternatives><name name-style="eastern" xml:lang="ru"><surname>Куприянов</surname><given-names>М. 
А.</given-names></name><name name-style="western" xml:lang="en"><surname>Kupriyanov</surname><given-names>M. A.</given-names></name></name-alternatives><bio xml:lang="ru"><p>Куприянов Матвей Андреевич - главный специалист</p><p>Ханты-Мансийск, 628011</p></bio><bio xml:lang="en"><p>Matvey A. Kupriyanov - Chief Specialist</p><p>Khanty-Mansiysk, 628011</p></bio><email xlink:type="simple">kupriyanovma@uriit.ru</email><xref ref-type="aff" rid="aff-2"/></contrib><contrib contrib-type="author" corresp="yes"><contrib-id contrib-id-type="orcid">https://orcid.org/0009-0000-0633-3832</contrib-id><name-alternatives><name name-style="eastern" xml:lang="ru"><surname>Байсалямова</surname><given-names>О. А.</given-names></name><name name-style="western" xml:lang="en"><surname>Baisalyamova</surname><given-names>O. A.</given-names></name></name-alternatives><bio xml:lang="ru"><p>Байсалямова Оксана Ахметсафаевна - главный специалист</p><p>Ханты-Мансийск, 628011</p></bio><bio xml:lang="en"><p>Oksana A. Baisalyamova - Chief Specialist</p><p>Khanty-Mansiysk, 628011</p></bio><email xlink:type="simple">baysalyamovaoa@uriit.ru</email><xref ref-type="aff" rid="aff-2"/></contrib><contrib contrib-type="author" corresp="yes"><contrib-id contrib-id-type="orcid">https://orcid.org/0009-0006-5869-1395</contrib-id><name-alternatives><name name-style="eastern" xml:lang="ru"><surname>Соколков</surname><given-names>О. И.</given-names></name><name name-style="western" xml:lang="en"><surname>Sokolkov</surname><given-names>O. I.</given-names></name></name-alternatives><bio xml:lang="ru"><p>Соколков Олег Игоревич - программист</p><p>Ханты-Мансийск, 628011</p></bio><bio xml:lang="en"><p>Oleg I. 
Sokolkov - Software Developer</p><p>Khanty-Mansiysk, 628011</p></bio><email xlink:type="simple">sokolkovoi@uriit.ru</email><xref ref-type="aff" rid="aff-3"/></contrib></contrib-group><aff-alternatives id="aff-1"><aff xml:lang="ru">Югорский научно-исследовательский институт информационных технологий; Югорский государственный университет<country>Россия</country></aff><aff xml:lang="en">Ugra Research Institute of Information Technologies; Yugra State University<country>Russian Federation</country></aff></aff-alternatives><aff-alternatives id="aff-2"><aff xml:lang="ru">Югорский научно-исследовательский институт информационных технологий<country>Россия</country></aff><aff xml:lang="en">Ugra Research Institute of Information Technologies<country>Russian Federation</country></aff></aff-alternatives><aff-alternatives id="aff-3"><aff xml:lang="en">Ugra Research Institute of Information Technologies<country>Russian Federation</country></aff></aff-alternatives><pub-date pub-type="collection"><year>2024</year></pub-date><pub-date pub-type="epub"><day>14</day><month>12</month><year>2024</year></pub-date><volume>24</volume><issue>5</issue><fpage>806</fpage><lpage>814</lpage><permissions><copyright-statement>Copyright &#x00A9; Мельников А.В., Полищук Ю.М., Русанов М.А., Аббазов В.Р., Кочергин Г.А., Куприянов М.А., Байсалямова О.А., Соколков О.И., 2024</copyright-statement><copyright-year>2024</copyright-year><copyright-holder xml:lang="ru">Мельников А.В., Полищук Ю.М., Русанов М.А., Аббазов В.Р., Кочергин Г.А., Куприянов М.А., Байсалямова О.А., Соколков О.И.</copyright-holder><copyright-holder xml:lang="en">Melnikov A.V., Polishchuk Y.M., Rusanov M.A., Abbazov V.R., Kochergin G.A., Kupriyanov M.A., Baisalyamova O.A., Sokolkov O.I.</copyright-holder><license license-type="creative-commons-attribution" xlink:href="https://creativecommons.org/licenses/by/4.0/" xlink:type="simple"><license-p>This work is licensed under a Creative Commons Attribution 4.0 
License.</license-p></license></permissions><self-uri xlink:href="https://ntv.elpub.ru/jour/article/view/151">https://ntv.elpub.ru/jour/article/view/151</self-uri><abstract><p>Введение. Исследована задача повышения оперативности обнаружения и картографирования лесных рубок по космическим снимкам с целью выявления нарушений экологического законодательства. Традиционные методы дешифрирования данных дистанционного зондирования Земли требуют больших трудозатрат и высокой квалификации исполнителей. Для автоматизации процессов дешифрирования космических снимков разработано большое количество разнообразных методов, в том числе основанных на применении современных технологий глубокого машинного обучения. В работе проведен сравнительный анализ сверточных и трансформерных моделей нейронных сетей, перспективных для решения задач сегментации лесных рубок по летним космическим снимкам со спутника Sentinel-2.Метод. В проведенном исследовании для сегментации лесных рубок применялись сверточные модели U-Net++, MA-Net, 3D U-Net, FPN-ConvLSTM и трансформерные модели SegFormer, Swin-UperNet. Особенностью компьютерного эксперимента является адаптация различных моделей нейронных сетей для анализа пары разновременных многоканальных спутниковых изображений. Представлено описание исходных данных, процедура их предобработки с учетом специфики и методика формирования обучающей выборки на основе имеющегося архива космических снимков. Предложены процедуры обучения и оценки точности рассматриваемых нейросетевых моделей с использованием метрики F1. Для оценки точности выполнено сравнение результатов моделирования с традиционным методом визуального дешифрирования с применением средств геоинформационных систем.Основные результаты. Получены результаты компьютерного эксперимента на примере территории Ханты-Мансийского автономного округа. Сравнение моделей сегментации лесных рубок по летним космическим снимкам показало, что точность F1 для разных моделей находится в пределах от 0,409 до 0,767. 
Наибольшую точность показала трансформерная модель SegFormer, которая позволила обнаруживать лесные рубки, неучтенные человеком. Время обработки одной пары полноразмерных космических снимков площади размером 100 × 100 км2 составило 15 мин, что в 16 раз меньше времени, требуемого специалисту для выполнения той же задачи традиционным способом. Такая скорость обработки снимков является важным показателем для мониторинга обширных лесохозяйственных территорий.Обсуждение. Предлагаемый метод сегментации лесных рубок, основанный на трансформерной нейронной сети SegFormer, может быть использован для решения задачи оперативного выявления и картографирования незаконных лесных рубок. Для повышения качества работы модели необходима балансировка обучающей выборки с целью выравнивания количества снимков с полигонами рубок различной формы и размера, а также включение в выборку снимков с частичной облачностью и тенями от облаков.</p></abstract><trans-abstract xml:lang="en"><p>The study aimed to improve the efficiency of detecting and mapping felling using satellite imagery, in order to identify violations of environmental regulations. Traditional remote sensing data interpretation methods are labor-intensive and require high operator expertise. To automate the satellite image interpretation process, numerous approaches have been developed, including those leveraging advanced deep machine learning technologies. The presented work conducted a comparative analysis of convolutional and transformer neural network models for the segmentation of felling in summer Sentinel-2 satellite imagery. The convolutional models evaluated included U-Net++, MA-Net, 3D U-Net, and FPN-ConvLSTM, while the transformer models were SegFormer and Swin-UperNet. A key aspect was the adaptation of these models to analyze pairs of multi-temporal, multi-channel satellite images. The data preprocessing, training sample generation, and model training and evaluation procedures using the F1 metric are described. 
The modeling results were compared to traditional visual interpretation methods using GIS tools. Experiments on the territory of the Khanty-Mansiysk Autonomous Okrug showed that the F1 accuracy of the different models ranged from 0.409 to 0.767, with the SegFormer transformer model achieving the highest performance and detecting felling missed by human interpretation. The processing time for a 100 × 100 km2 image pair was 15 minutes, 16 times faster than manual methods — an important factor for large-scale forest monitoring. The proposed SegFormer-based felling segmentation approach can be used for rapid detection and mapping of illegal logging. Further improvements could involve balancing the training dataset to include more diverse clearing shapes and sizes as well as incorporating partially cloudy images.</p></trans-abstract><kwd-group xml:lang="ru"><kwd>картографирование лесных рубок</kwd><kwd>космические снимки</kwd><kwd>глубокое машинное обучение</kwd><kwd>нейросетевые модели</kwd><kwd>сегментация изображений</kwd><kwd>мониторинг лесных территорий</kwd></kwd-group><kwd-group xml:lang="en"><kwd>felling mapping</kwd><kwd>satellite imagery</kwd><kwd>deep machine learning</kwd><kwd>neural network models</kwd><kwd>image segmentation</kwd><kwd>forest area monitoring</kwd></kwd-group></article-meta></front><back><ref-list><title>References</title><ref id="cit1"><label>1</label><citation-alternatives><mixed-citation xml:lang="ru">Габдрахманов Р.М., Кочергин Г.А., Куприянов М.А., Хамедов В.А., Шарафутдинов Р.Р. Реестр изменений лесного фонда ХМАО — Югры. Свидетельство о регистрации базы данных RU2016620648. 2016.</mixed-citation><mixed-citation xml:lang="en">Gabdrakhmanov R.M., Kochergin G.A., Kupriianov M.A., Khamedov V.A., Sharafutdinov R.R. Register of changes in the forest fund of Khanty-Mansiysk Autonomous Okrug — Yugra. 
Certificate of registration of the database RU2016620648, 2016.</mixed-citation></citation-alternatives></ref><ref id="cit2"><label>2</label><citation-alternatives><mixed-citation xml:lang="ru">Torres D.L., Turnes J.N., Soto Vega P.J., Feitosa R.Q., Silva D.E., Marcato Junior J., Almeida C. Deforestation detection with fully convolutional networks in the Amazon Forest from Landsat-8 and Sentinel-2 images // Remote Sensing. 2021. V. 13. N 24. P. 5084. https://doi.org/10.3390/rs13245084</mixed-citation><mixed-citation xml:lang="en">Torres D.L., Turnes J.N., Soto Vega P.J., Feitosa R.Q., Silva D.E., Marcato Junior J., Almeida C. Deforestation detection with fully convolutional networks in the Amazon Forest from Landsat-8 and Sentinel-2 images. Remote Sensing, 2021, vol. 13, no. 24, pp. 5084. https://doi.org/10.3390/rs13245084</mixed-citation></citation-alternatives></ref><ref id="cit3"><label>3</label><citation-alternatives><mixed-citation xml:lang="ru">Khan S.H., He X., Porikli F., Bennamoun M. Forest change detection in incomplete satellite images with deep neural networks // IEEE Transactions on Geoscience and Remote Sensing. 2017. V. 55. N 9. P. 5407–5423. https://doi.org/10.1109/tgrs.2017.2707528</mixed-citation><mixed-citation xml:lang="en">Khan S.H., He X., Porikli F., Bennamoun M. Forest change detection in incomplete satellite images with deep neural networks. IEEE Transactions on Geoscience and Remote Sensing, 2017, vol. 55, no. 9, pp. 5407–5423. https://doi.org/10.1109/tgrs.2017.2707528</mixed-citation></citation-alternatives></ref><ref id="cit4"><label>4</label><citation-alternatives><mixed-citation xml:lang="ru">John D., Zhang C. An attention-based U-Net for detecting deforestation within satellite sensor imagery // International Journal of Applied Earth Observation and Geoinformation. 2022. V. 107. P. 102685. https://doi.org/10.1016/j.jag.2022.102685</mixed-citation><mixed-citation xml:lang="en">John D., Zhang C. 
An attention-based U-Net for detecting deforestation within satellite sensor imagery. International Journal of Applied Earth Observation and Geoinformation, 2022, vol. 107, pp. 102685. https://doi.org/10.1016/j.jag.2022.102685</mixed-citation></citation-alternatives></ref><ref id="cit5"><label>5</label><citation-alternatives><mixed-citation xml:lang="ru">Podoprigorova N.S., Savchenko G.A., Rabcevich K.R., Kanev A.I., Tarasov A.V., Shikohov A.N. Forest damage segmentation using machine learning methods on satellite images // Studies in Computational Intelligence. 2023. V. 1120. P. 380–388. https://doi.org/10.1007/978-3-031-44865-2_41</mixed-citation><mixed-citation xml:lang="en">Podoprigorova N.S., Savchenko G.A., Rabcevich K.R., Kanev A.I., Tarasov A.V., Shikohov A.N. Forest damage segmentation using machine learning methods on satellite images. Studies in Computational Intelligence, 2023, vol. 1120, pp. 380–388. https://doi.org/10.1007/978-3-031-44865-2_41</mixed-citation></citation-alternatives></ref><ref id="cit6"><label>6</label><citation-alternatives><mixed-citation xml:lang="ru">Бычков И.В., Ружников Г.М., Федоров Р.К., Попова А.К., Авраменко Ю.В. Классификация космоснимков Sentinel-2 Байкальской природной территории // Компьютерная оптика. 2022. Т. 46. № 1. С. 90–96. https://doi.org/10.18287/2412-6179-co-1022</mixed-citation><mixed-citation xml:lang="en">Bychkov I.V., Ruzhnikov G.M., Fedorov R.K., Popova A.K., Avramenko Y.V. Classification of Sentinel-2 satellite images of the Baikal Natural Territory. Computer Optics, 2022, vol. 46, no. 1, pp. 90–96. (in Russian). https://doi.org/10.18287/2412-6179-co-1022</mixed-citation></citation-alternatives></ref><ref id="cit7"><label>7</label><citation-alternatives><mixed-citation xml:lang="ru">Мельников А.В., Кочергин Г.А., Аббазов В.Р., Байсалямова О.А., Русанов М.А., Полищук Ю.М. 
Нейросетевая модель для сегментации космических снимков в мониторинге факторов обезлесения территории // Вестник Южно-Уральского государственного университета. Серия: Компьютерные технологии, управление, радиоэлектроника. 2023. Т. 23. № 3. С. 5–15. https://doi.org/10.14529/ctcr230301</mixed-citation><mixed-citation xml:lang="en">Melnikov A.V., Kochergin G.A., Abbazov V.R., Baisalamova O.A., Rusanov M.A., Polishchuk Yu.M. A neural network model for space image segmentation in monitoring of deforestation factors. Bulletin of the South Ural State University. Series Computer Technology, Automatic Control, Radio Electronics, 2023, vol. 23, no. 3, pp. 5–15. (in Russian). https://doi.org/10.14529/ctcr230301</mixed-citation></citation-alternatives></ref><ref id="cit8"><label>8</label><citation-alternatives><mixed-citation xml:lang="ru">Main-Knorn M., Pflug B., Louis J., Debaecker V., Müller-Wilm U., Gascon F. Sen2Cor for Sentinel-2 // Proceedings of SPIE. 2017. V. 10427. P. 1042704. https://doi.org/10.1117/12.2278218</mixed-citation><mixed-citation xml:lang="en">Main-Knorn M., Pflug B., Louis J., Debaecker V., Müller-Wilm U., Gascon F. Sen2Cor for Sentinel-2. Proceedings of SPIE, 2017, vol. 10427, pp. 1042704. https://doi.org/10.1117/12.2278218</mixed-citation></citation-alternatives></ref><ref id="cit9"><label>9</label><citation-alternatives><mixed-citation xml:lang="ru">Garnot V.S.F., Landrieu L. Panoptic segmentation of satellite image time series with convolutional temporal attention networks // Proc. of the IEEE/CVF International Conference on Computer Vision (ICCV). 2021. P. 4852–4861. https://doi.org/10.1109/iccv48922.2021.00483</mixed-citation><mixed-citation xml:lang="en">Garnot V.S.F., Landrieu L. Panoptic segmentation of satellite image time series with convolutional temporal attention networks. Proc. of the IEEE/CVF International Conference on Computer Vision (ICCV), 2021, pp. 4852–4861. 
https://doi.org/10.1109/iccv48922.2021.00483</mixed-citation></citation-alternatives></ref><ref id="cit10"><label>10</label><citation-alternatives><mixed-citation xml:lang="ru">Rustowicz R., Cheong R., Wang L., Ermon S., Burke M., Lobell D. Semantic segmentation of crop type in Africa: A novel dataset and analysis of deep learning methods // Proc. of the IEEE/CVF Conference on Computer Vision and Pattern Recognition (CVPR) Workshops. 2019. P. 75–82.</mixed-citation><mixed-citation xml:lang="en">Rustowicz R., Cheong R., Wang L., Ermon S., Burke M., Lobell D. Semantic segmentation of crop type in Africa: A novel dataset and analysis of deep learning methods. Proc. of the IEEE/CVF Conference on Computer Vision and Pattern Recognition (CVPR) Workshop, 2019, pp. 75–82.</mixed-citation></citation-alternatives></ref><ref id="cit11"><label>11</label><citation-alternatives><mixed-citation xml:lang="ru">Fan T., Wang G., Li Y., Wang H. MA-Net: A multi-scale attention network for liver and tumor segmentation // IEEE Access. 2020. V. 8. P. 179656–179665. https://doi.org/10.1109/access.2020.3025372</mixed-citation><mixed-citation xml:lang="en">Fan T., Wang G., Li Y., Wang H. MA-Net: A multi-scale attention network for liver and tumor segmentation. IEEE Access, 2020, vol. 8, pp. 179656–179665. https://doi.org/10.1109/access.2020.3025372</mixed-citation></citation-alternatives></ref><ref id="cit12"><label>12</label><citation-alternatives><mixed-citation xml:lang="ru">Chamorro Martinez J.A., Cué La Rosa L.E., Feitosa R.Q., Sanches I.D., Happ P.N. Fully convolutional recurrent networks for multidate crop recognition from multitemporal image sequences // ISPRS Journal of Photogrammetry and Remote Sensing. 2021. V. 171. P. 188–201. https://doi.org/10.1016/j.isprsjprs.2020.11.007</mixed-citation><mixed-citation xml:lang="en">Chamorro Martinez J.A., Cué La Rosa L.E., Feitosa R.Q., Sanches I.D., Happ P.N. 
Fully convolutional recurrent networks for multidate crop recognition from multitemporal image sequences. ISPRS Journal of Photogrammetry and Remote Sensing, 2021, vol. 171, pp. 188–201. https://doi.org/10.1016/j.isprsjprs.2020.11.007</mixed-citation></citation-alternatives></ref><ref id="cit13"><label>13</label><citation-alternatives><mixed-citation xml:lang="ru">Shi X., Chen Z., Wang H., Yeung D.-Y., Wong W., Woo W. Convolutional LSTM network: A machine learning approach for precipitation nowcasting // arXiv. 2015. arXiv:1506.04214. https://doi.org/10.48550/arXiv.1506.04214</mixed-citation><mixed-citation xml:lang="en">Shi X., Chen Z., Wang H., Yeung D.-Y., Wong W., Woo W. Convolutional LSTM network: A machine learning approach for precipitation nowcasting. arXiv, 2015, arXiv:1506.04214. https://doi.org/10.48550/arXiv.1506.04214</mixed-citation></citation-alternatives></ref><ref id="cit14"><label>14</label><citation-alternatives><mixed-citation xml:lang="ru">Xie E., Wang W., Yu Z., Anandkumar A., Alvarez J.M., Luo P. SegFormer: Simple and efficient design for semantic segmentation with transformers // arXiv. 2021. arXiv:2105.15203. https://doi.org/10.48550/arXiv.2105.15203</mixed-citation><mixed-citation xml:lang="en">Xie E., Wang W., Yu Z., Anandkumar A., Alvarez J.M., Luo P. SegFormer: Simple and efficient design for semantic segmentation with transformers. arXiv, 2021, arXiv:2105.15203. https://doi.org/10.48550/arXiv.2105.15203</mixed-citation></citation-alternatives></ref><ref id="cit15"><label>15</label><citation-alternatives><mixed-citation xml:lang="ru">Liu Z., Lin Y., Cao Y., Hu H., Wei Y., Zhang Z., Lin S., Guo B. Swin transformer: Hierarchical vision transformer using shifted Windows // Proc. of the IEEE/CVF International Conference on Computer Vision (ICCV). 2021. P. 9992–10002. 
https://doi.org/10.1109/iccv48922.2021.00986</mixed-citation><mixed-citation xml:lang="en">Liu Z., Lin Y., Cao Y., Hu H., Wei Y., Zhang Z., Lin S., Guo B. Swin transformer: Hierarchical vision transformer using shifted Windows. Proc. of the IEEE/CVF International Conference on Computer Vision (ICCV), 2021, pp. 9992–10002. https://doi.org/10.1109/iccv48922.2021.00986</mixed-citation></citation-alternatives></ref><ref id="cit16"><label>16</label><citation-alternatives><mixed-citation xml:lang="ru">Kruitwagen L. Towards DeepSentinel: An extensible corpus of labelled Sentinel-1 and -2 imagery and a general-purpose sensorfusion semantic embedding model // arXiv. 2021. arXiv:2102.06260. https://doi.org/10.48550/arXiv.2102.06260</mixed-citation><mixed-citation xml:lang="en">Kruitwagen L. Towards DeepSentinel: An extensible corpus of labelled Sentinel-1 and -2 imagery and a general-purpose sensorfusion semantic embedding model. arXiv, 2021, arXiv:2102.06260. https://doi.org/10.48550/arXiv.2102.06260</mixed-citation></citation-alternatives></ref><ref id="cit17"><label>17</label><citation-alternatives><mixed-citation xml:lang="ru">Betzalel E., Penso C., Navon A., Fetaya E. A study on the evaluation of generative models // arXiv. 2022. arXiv:2206.10935. https://doi.org/10.48550/arXiv.2206.10935</mixed-citation><mixed-citation xml:lang="en">Betzalel E., Penso C., Navon A., Fetaya E. A study on the evaluation of generative models. arXiv, 2022, arXiv:2206.10935. https://doi.org/10.48550/arXiv.2206.10935</mixed-citation></citation-alternatives></ref></ref-list><fn-group><fn fn-type="conflict"><p>The authors declare that there are no conflicts of interest present.</p></fn></fn-group></back></article>
