<?xml version="1.0" encoding="UTF-8"?>
<TEI xml:space="preserve" xmlns="http://www.tei-c.org/ns/1.0" 
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" 
xsi:schemaLocation="http://www.tei-c.org/ns/1.0 https://raw.githubusercontent.com/kermitt2/grobid/master/grobid-home/schemas/xsd/Grobid.xsd"
 xmlns:xlink="http://www.w3.org/1999/xlink">
	<teiHeader xml:lang="en">
		<fileDesc>
			<titleStmt>
				<title level="a" type="main">High Resolution Environmental Monitoring of Pollinator Insects through Macro Camera Trapping and AI</title>
			</titleStmt>
			<publicationStmt>
				<publisher/>
				<availability status="unknown"><licence/></availability>
			</publicationStmt>
			<sourceDesc>
				<biblStruct>
					<analytic>
						<author>
							<persName><forename type="first">Mohammad</forename><surname>Sa'doun</surname></persName>
							<affiliation key="aff0">
								<orgName type="department">SIENA - Spatial Informatics for Environmental Applications</orgName>
								<orgName type="institution">Carinthia University of Applied Sciences</orgName>
								<address>
									<settlement>Villach</settlement>
									<country key="AT">Austria</country>
								</address>
							</affiliation>
						</author>
						<author>
							<persName><forename type="first">Daniel</forename><surname>Dalton</surname></persName>
							<affiliation key="aff1">
								<orgName type="institution" key="instit1">Carinthia University of Applied Sciences</orgName>
								<orgName type="institution" key="instit2">UNESCO Chair on Sustainable Management of Conservation Areas</orgName>
								<address>
									<settlement>Villach</settlement>
									<country key="AT">Austria</country>
								</address>
							</affiliation>
						</author>
						<author>
							<persName><forename type="first">Vanessa</forename><surname>Berger</surname></persName>
							<affiliation key="aff1">
								<orgName type="institution" key="instit1">Carinthia University of Applied Sciences</orgName>
								<orgName type="institution" key="instit2">UNESCO Chair on Sustainable Management of Conservation Areas</orgName>
								<address>
									<settlement>Villach</settlement>
									<country key="AT">Austria</country>
								</address>
							</affiliation>
						</author>
						<author>
							<persName><forename type="first">Gernot</forename><surname>Paulus</surname></persName>
							<affiliation key="aff0">
								<orgName type="department">SIENA - Spatial Informatics for Environmental Applications</orgName>
								<orgName type="institution">Carinthia University of Applied Sciences</orgName>
								<address>
									<settlement>Villach</settlement>
									<country key="AT">Austria</country>
								</address>
							</affiliation>
						</author>
						<author>
							<persName><forename type="first">Karl-Heinrich</forename><surname>Anders</surname></persName>
							<affiliation key="aff0">
								<orgName type="department">SIENA - Spatial Informatics for Environmental Applications</orgName>
								<orgName type="institution">Carinthia University of Applied Sciences</orgName>
								<address>
									<settlement>Villach</settlement>
									<country key="AT">Austria</country>
								</address>
							</affiliation>
						</author>
						<author>
							<persName><forename type="first">Ilja</forename><surname>Svetnik</surname></persName>
							<affiliation key="aff1">
								<orgName type="institution" key="instit1">Carinthia University of Applied Sciences</orgName>
								<orgName type="institution" key="instit2">UNESCO Chair on Sustainable Management of Conservation Areas</orgName>
								<address>
									<settlement>Villach</settlement>
									<country key="AT">Austria</country>
								</address>
							</affiliation>
						</author>
						<author>
							<persName><forename type="first">Johanna</forename><surname>Schultz</surname></persName>
							<affiliation key="aff1">
								<orgName type="institution" key="instit1">Carinthia University of Applied Sciences</orgName>
								<orgName type="institution" key="instit2">UNESCO Chair on Sustainable Management of Conservation Areas</orgName>
								<address>
									<settlement>Villach</settlement>
									<country key="AT">Austria</country>
								</address>
							</affiliation>
						</author>
						<author>
							<persName><forename type="first">Peter</forename><surname>Unglaub</surname></persName>
							<affiliation key="aff1">
								<orgName type="institution" key="instit1">Carinthia University of Applied Sciences</orgName>
								<orgName type="institution" key="instit2">UNESCO Chair on Sustainable Management of Conservation Areas</orgName>
								<address>
									<settlement>Villach</settlement>
									<country key="AT">Austria</country>
								</address>
							</affiliation>
						</author>
						<title level="a" type="main">High Resolution Environmental Monitoring of Pollinator Insects through Macro Camera Trapping and AI</title>
					</analytic>
					<monogr>
						<idno type="ISSN">1613-0073</idno>
					</monogr>
					<idno type="MD5">F1353945139C2CE5AA8CDCDE21E926D2</idno>
				</biblStruct>
			</sourceDesc>
		</fileDesc>
		<encodingDesc>
			<appInfo>
				<application version="0.7.2" ident="GROBID" when="2025-04-23T20:07+0000">
					<desc>GROBID - A machine learning software for extracting information from scholarly documents</desc>
					<ref target="https://github.com/kermitt2/grobid"/>
				</application>
			</appInfo>
		</encodingDesc>
		<profileDesc>
			<textClass>
				<keywords>
					<term>Pollination ecosystem service</term>
					<term>CNN</term>
					<term>YOLOv8</term>
					<term>Machine learning</term>
					<term>Insect identification</term>
					<term>Camera traps</term>
				</keywords>
			</textClass>
			<abstract>
<div xmlns="http://www.tei-c.org/ns/1.0"><p>Insect identification using camera traps has been neglected due to traditional sensors' inability to detect cold-blooded organisms. However, insects are essential for ecosystem services like pollination, making accurate detection crucial. This study uses advancements in computer vision, specifically convolutional neural networks (CNNs), to develop an automated insect identification algorithm. High-resolution time-lapse images from camera traps in the Austrian Alps were annotated and processed with LabelBox and YOLOv8, achieving order-level insect classification. Results showed a high diversity of insects with a peak mean Average Precision (mAP) of 54.23% at an Intersection over Union (IoU) threshold of 0.5, indicating the method's potential for precise species-level data collection. Despite challenges like background misclassification, the approach offers a robust framework for insect detection and valuable insights for refinement. YOLO-generated data enable comprehensive time series analysis, aiding in effective monitoring and management strategies. This study supports CNN-based methods in ecological monitoring, relevant for pest management and identifying key species.</p></div>
			</abstract>
		</profileDesc>
	</teiHeader>
	<text xml:lang="en">
		<body>
<div xmlns="http://www.tei-c.org/ns/1.0"><head n="1.">Introduction</head><p>Automatic identification of insects using camera traps has been largely neglected because most camera trap sensors work based on heat differential between the target animal and the surrounding environment. Insects, being cold-blooded, largely assume the temperature of their environment; therefore, this heat differential is negligible. However, insects play a critical role in the environment, and their detection and classification are crucial for understanding their impact. Recent advancements in computer vision technology, particularly convolutional neural networks (CNNs), have enabled accurate insect detection and classification. As a pilot action towards developing an automated insect identification algorithm supported by the FFG-funded project BioMONITec, we proceeded with a camera trapping design focused on pollinator insects. We gathered our own images using a time-lapse approach to document insect visitors on blooming flowers. Images were annotated using the data management platform LabelBox (LabelBox Inc., San Francisco, CA, USA) and processed for automated recognition through the opensource program YOLOv8 (Ultralytics Inc., Los Angeles, CA, USA), achieving order-level classification of insects. This approach aligns with other studies demonstrating the potential of CNNs in agricultural pest management. For instance, <ref type="bibr" target="#b0">[1]</ref> showed that YOLO architectures, including YOLOv5, could effectively detect and identify insect pests with high precision, which is essential for reducing pesticide use and promoting spot spraying. Furthermore, <ref type="bibr" target="#b1">[2]</ref> employed Faster R-CNN with inception ResNet v2 to detect and count pests in greenhouse tomato crops, achieving strong correlations between network detections and human observations. 
These studies highlight the importance of monitoring pests and the potential of deep learning to automate this process, making it more efficient and accurate. The integration of AI and automation in agriculture, including computer vision and other remote monitoring technologies, offers promising solutions for integrated pest management. Despite challenges such as detecting small insects and improving real-time detection accuracy, advancements in CNN-based methods are paving the way for broader applications, including the identification of economically and medically important species like pollinators and mosquitoes. (4th International Workshop on Camera Traps, AI, and Ecology, September 5-6, 2024, Hagenberg, Austria; contact: m.sadoun@fh-kaernten.at, M. Sa'doun)</p></div>
<div xmlns="http://www.tei-c.org/ns/1.0"><head n="2.">Materials and Methods</head><p>Our four-phase methodology included data acquisition, annotation, YOLOv8 training/validation and post-processing. During the data acquisition phase, high-resolution camera traps were utilized to capture insects. Subsequently, the annotation phase involved annotating the acquired images using the LabelBox platform. Then, the output from the previous phases, comprising the images and their corresponding labels, was fed into YOLO for training and validation purposes. The objective of this phase was to assess the efficacy of AI in detecting and accurately classifying insects. The last phase was the post-processing of AI detections where we performed a time series analysis to plot the occurrences of different insect types for a particular dataset.</p></div>
<div xmlns="http://www.tei-c.org/ns/1.0"><head n="2.1.">Data acquisition and study sites</head><p>Waterproof outdoor digital cameras (WG-70, Ricoh Co., Ltd., Tokyo, Japan) were installed in Weinitzen and Sittersdorf (Carinthia), and St. Margarethen im Lungau (Salzburg) using tripods (Joby GorillaPod 1K, Vivendum Plc, Richmond, UK). Cameras targeted 10 blooming annual plant species over 13 dates ( Table <ref type="table" target="#tab_0">1</ref>). Plant species had a range of flower colors and phenology. These factors were assumed to impact the different types of insects over the season. Cameras utilized a macro setting at the highest photo quality (16MP) and were programmed to trigger at 30-to 45-second intervals. Devices exposed to intense sunlight occasionally shut down due to overheating, thus impacting the planned schedule of time lapse photo capture. </p></div>
<div xmlns="http://www.tei-c.org/ns/1.0"><head n="2.2.">Image Annotation with LabelBox</head><p>In the current study, all visible parts of individual insects were fitted as tightly as possible within corresponding annotated bounding box. LabelBox was used as the annotation program that fed into YOLOv8 analysis. The process of drawing a bounding box is simply to click-and-drag over the object of interest. There are several important features that contribute to a good annotation in LabelBox, specifically in the context of bounding boxes. A good bounding box annotation should accurately capture the entire object of interest, without cutting off any parts or leaving out any areas. This is important for downstream tasks like object recognition and classification, as well as for visualizing the annotated objects. Additionally, a good annotation should have a clear and appropriate ratio of object to background. The bounding box should tightly surround the object of interest, while minimizing the inclusion of unnecessary background elements that can interfere with object recognition or classification. Furthermore, a good annotation should be consistent across all instances of a given object, as well as accurate in terms of location and size. Consistency is important for training machine learning models, as it helps to ensure that the model is able to recognize the same object across different images. Accuracy is crucial for ensuring that the annotated data is of high quality and can be used for downstream tasks with confidence. The data were converted in a pre-processing step into x-y coordinates with a calculated width and height of each object and saved as a small .txt file. Annotations were then coded, where each unique classification was assigned a number. Numbers corresponded to a mapping function where the classification was designated to an order of insect. Next, the annotations were randomly divided into 'training' and 'validation' data sets. 
To address the class imbalance and potential overfitting, we implemented a stratified random sampling technique for the train-validation split. This approach ensured that each insect class (Chelicerata, Coleoptera, Diptera, Hemiptera, Hymenoptera, and Lepidoptera) was represented proportionally in both sets. While we aimed to mitigate the imbalance, particularly between Diptera and Hemiptera, we acknowledge that the initial disparity might have persisted to some extent due to the nature of the data. For instance, multiple insect classes, especially Diptera, can often appear within the same image, making perfect class balance challenging. While the stratified sampling strategy helped to alleviate the impact of class imbalance to some extent, it's important to note that the model's performance might still be influenced by the inherent limitations of the dataset, particularly regarding underrepresented classes. The actual counts used in both training and validation are shown (Table <ref type="table" target="#tab_1">2</ref>). </p></div>
<div xmlns="http://www.tei-c.org/ns/1.0"><head n="3.">Results</head></div>
<div xmlns="http://www.tei-c.org/ns/1.0"><head n="3.1.">YOLOv8</head><p>YOLOv8 was used in the pilot action. YOLO is an open-source object detection algorithm created by <ref type="bibr" target="#b2">[3]</ref> that can be modified and customized by developers to suit their specific needs. It takes a single pass over an image and can detect multiple objects at once. It uses a neural network that is trained on a large data set of labelled images, and it breaks down the image into a grid of cells. Each cell is responsible for predicting bounding boxes which are then classified into the categories of objects found in the dataset.</p><p>YOLO can accurately detect objects in an image even when the object is partially occluded. The model was trained for 100 epochs, peaking at epoch 88 with 54.23% mAP50 (Figure <ref type="figure" target="#fig_0">1</ref>). The validation process involves evaluating the model on the validation set and computing how close the model's predictions were to the ground truth. The validation process is represented in the form of a confusion matrix (Figure <ref type="figure">2</ref>). Each cell in the confusion matrix is bounded by a color-coded border to represent different aspects of the model's performance. The matrix is organized based on normalized numbers related to the Ground Truth count of classes in the validation set. Green cells signify true positives, yellow cells indicate true negatives or misclassification, red cells represent undetected instances, and black cells denote background misclassified as insects. This confusion matrix offers a detailed breakdown of the model's detection and classification outcomes, providing valuable insights into its strengths and areas requiring improvement. Some examples of correctly classified insects are provided (Figure <ref type="figure">3</ref>).</p></div>
<div xmlns="http://www.tei-c.org/ns/1.0"><head n="3.2.">Time Series Analysis</head><p>The analysis processes detection text files generated by YOLO, organizing the information into a table grouped by time detected and insect taxonomy. Subsequently, charts are generated to illustrate the frequency of insect sightings for each species at different times of the day (Figure <ref type="figure" target="#fig_2">4</ref>). This chart serves as a tool for understanding when various types of insects are most active, contributing valuable insights for insect monitoring and management strategies.</p></div>
<div xmlns="http://www.tei-c.org/ns/1.0"><head n="4.">Discussion and Conclusion</head><p>Camera trapping for automated identification of insects is in its infancy, although some promising case studies exist (e.g., <ref type="bibr" target="#b3">[4]</ref>). Our study made significant strides towards advancing this goal, but much work remains. Our results show an impressive diversity of insects in a narrow window of time, considering only a single season of effort. We found a method to gather highly precise species-level data, allowing researchers and museum curators to showcase the recovered species to a wider audience in multiple ways. The well-defined data annotation process in LabelBox is a key contributor to the pipeline, ensuring the model is trained on accurately labelled data and promoting precise object detection. At the current status, while YOLO demonstrated a baseline level of performance in detecting and classifying insects, the achieved mean Average Precision (mAP) of 50% indicates room for improvement. The confusion matrix further highlights specific areas where the model struggled to differentiate between certain insect classes. It involves pre-processing to split the data into training and validation sets, training to minimize the loss function, and evaluation to assess performance. YOLOv8's advantages include quick detection and classification, flexibility, adaptability, and lightweight architecture. However, the model's tendency to detect background elements as insects, especially in complex backgrounds, highlights a challenge that needs fine-tuning in future iterations. Addressing this issue could enhance the model's precision and reliability. The structured data generated by YOLO allow for comprehensive time series analysis of temporal patterns in insect detection, providing insights into the temporal behavior of different species and aiding in developing informed monitoring and management strategies. 
In conclusion, the successful YOLO upgrade, effective data processing pipeline, and well-defined annotation process contribute to a robust framework for insect detection. The results allow for meaningful time series analysis and provide valuable insights for ongoing improvements to address challenges such as background detection. This ongoing refinement ensures that the model remains a powerful tool for accurate and insightful insect  </p></div>
<div xmlns="http://www.tei-c.org/ns/1.0"><head n="5.">Outlook</head><p>For the next steps, we plan to complete the annotation phase in LabelBox to fully incorporate the data into the model. Following this, we will initiate the training and evaluation process. Throughout these stages, we will regularly monitor the model's performance, making any necessary updates and adjustments. Once these steps are successfully completed, we aim to deploy the model in a practical application for real-world use.</p></div><figure xmlns="http://www.tei-c.org/ns/1.0" xml:id="fig_0"><head>Figure 1 :</head><label>1</label><figDesc>Figure 1: Training process progress with mAP50 saturates at epoch 88.</figDesc><graphic coords="4,72.00,104.67,451.29,219.42" type="bitmap" /></figure>
<figure xmlns="http://www.tei-c.org/ns/1.0" xml:id="fig_1"><head>Figure 2 :Figure 3 :</head><label>23</label><figDesc>Figure 2: Confusion matrix. Green (Correct classification), Yellow (Misclassified), Red (Undetected), Black (Background detected as insects)</figDesc><graphic coords="5,128.41,65.61,338.47,254.73" type="bitmap" /></figure>
<figure xmlns="http://www.tei-c.org/ns/1.0" xml:id="fig_2"><head>Figure 4 :</head><label>4</label><figDesc>Figure 4: Bar chart time series analysis of the insect dataset.</figDesc><graphic coords="6,72.00,65.61,451.28,248.27" type="bitmap" /></figure>
<figure xmlns="http://www.tei-c.org/ns/1.0" xml:id="fig_3"><head></head><label></label><figDesc>monitoring and management.</figDesc></figure>
<figure xmlns="http://www.tei-c.org/ns/1.0" type="table" xml:id="tab_0"><head>Table 1</head><label>1</label><figDesc>Summary statistics on the number of images taken at specific host plants at the three study sites.</figDesc><table><row><cell cols="2">Camera Date</cell><cell>Location</cell><cell>Host</cell><cell cols="3">Start time End time Photos taken</cell></row><row><cell>1</cell><cell cols="3">09-May St. Margarethen Borago officinalis</cell><cell>09:50</cell><cell>19:19</cell><cell>1116</cell></row><row><cell>2</cell><cell cols="3">09-May St. Margarethen Borago officinalis</cell><cell>09:52</cell><cell>19:20</cell><cell>1116</cell></row><row><cell>3</cell><cell>26-Jun</cell><cell>Sittersdorf</cell><cell>Achillea millefolium</cell><cell>05:06</cell><cell>20:03</cell><cell>1001</cell></row><row><cell>4</cell><cell>26-Jun</cell><cell>Sittersdorf</cell><cell>Achillea millefolium</cell><cell>05:12</cell><cell>20:06</cell><cell>1001</cell></row><row><cell>5</cell><cell>26-Jun</cell><cell>Sittersdorf</cell><cell>Borago officinalis</cell><cell>05:06</cell><cell>20:03</cell><cell>1001</cell></row><row><cell>6</cell><cell>02-Jul</cell><cell cols="2">St. Margarethen Inula helenium</cell><cell>07:51</cell><cell>16:11</cell><cell>666</cell></row><row><cell>1</cell><cell>03-Jul</cell><cell cols="2">St. Margarethen Filipendula ulmaria</cell><cell>07:54</cell><cell>20:23</cell><cell>1000</cell></row><row><cell>5</cell><cell>03-Jul</cell><cell>Sittersdorf</cell><cell>Achillea millefolium</cell><cell>09:39</cell><cell>17:59</cell><cell>1000</cell></row><row><cell>6</cell><cell>09-Jul</cell><cell cols="2">St. Margarethen Inula helenium</cell><cell>09:02</cell><cell>17:21</cell><cell>1000</cell></row><row><cell>7</cell><cell>10-Jul</cell><cell cols="2">St. 
Margarethen Inula helenium</cell><cell>07:06</cell><cell>15:35</cell><cell>640</cell></row><row><cell>8</cell><cell>13-Jul</cell><cell>Weinitzen</cell><cell cols="2">Dianthus carthusianorum 09:06</cell><cell>21:35</cell><cell>1000</cell></row><row><cell>9</cell><cell>13-Jul</cell><cell>Weinitzen</cell><cell>Knautia arvensis</cell><cell>09:29</cell><cell>21:58</cell><cell>1000</cell></row><row><cell>1</cell><cell>17-Jul</cell><cell cols="2">St. Margarethen Inula helenium</cell><cell>10:54</cell><cell>19:14</cell><cell>1000</cell></row><row><cell>6</cell><cell>17-Jul</cell><cell cols="2">St. Margarethen Filipendula ulmaria</cell><cell>10:52</cell><cell>19:11</cell><cell>1000</cell></row><row><cell>7</cell><cell>17-Jul</cell><cell cols="2">St. Margarethen Inula helenium</cell><cell>10:57</cell><cell>19:16</cell><cell>1000</cell></row><row><cell>10</cell><cell>17-Jul</cell><cell cols="2">St. Margarethen Hypericum perforatum</cell><cell>11:01</cell><cell>19:20</cell><cell>1000</cell></row><row><cell>8</cell><cell>19-Jul</cell><cell>Weinitzen</cell><cell>Tragopogon pratensis</cell><cell>09:34</cell><cell>19:17</cell><cell>1000</cell></row><row><cell>9</cell><cell>20-Jul</cell><cell>Weinitzen</cell><cell>Oreoselinum majus</cell><cell>08:05</cell><cell>20:34</cell><cell>1000</cell></row><row><cell>9</cell><cell>27-Jul</cell><cell>Weinitzen</cell><cell>Prunella grandiflora</cell><cell>07:04</cell><cell>19:33</cell><cell>1000</cell></row><row><cell>8</cell><cell cols="2">02-Aug Weinitzen</cell><cell>Oreoselinum majus</cell><cell>09:24</cell><cell>19:07</cell><cell>1000</cell></row><row><cell>3</cell><cell cols="2">09-Aug Weinitzen</cell><cell>Oreoselinum majus</cell><cell>09:50</cell><cell>19:32</cell><cell>1000</cell></row><row><cell>5</cell><cell cols="2">09-Aug Weinitzen</cell><cell>Oreoselinum 
majus</cell><cell>09:50</cell><cell>19:32</cell><cell>1000</cell></row><row><cell>Total</cell><cell></cell><cell></cell><cell></cell><cell></cell><cell></cell><cell>24,280*</cell></row></table></figure>
<figure xmlns="http://www.tei-c.org/ns/1.0" type="table" xml:id="tab_1"><head>Table 2</head><label>2</label><figDesc>Training and validation sets sizes used in the study.</figDesc><table><row><cell>Order</cell><cell cols="3">Training count Validation count Total count</cell></row><row><cell>Chelicerata</cell><cell>216</cell><cell>32</cell><cell>248</cell></row><row><cell>Coleoptera</cell><cell>261</cell><cell>43</cell><cell>304</cell></row><row><cell>Diptera</cell><cell>2874</cell><cell>523</cell><cell>3397</cell></row><row><cell>Hemiptera</cell><cell>200</cell><cell>37</cell><cell>237</cell></row><row><cell cols="2">Hymenoptera 343</cell><cell>64</cell><cell>407</cell></row><row><cell>Lepidoptera</cell><cell>215</cell><cell>31</cell><cell>246</cell></row></table></figure>
		</body>
		<back>

			<div type="acknowledgement">
<div xmlns="http://www.tei-c.org/ns/1.0"><head>Acknowledgements</head><p>This work was supported by the FFG COIN project Biodiversity Monitoring Technologies Test, Development and Transfer of disruptive engineering technologies into conservation practice (BioMONITec) and the Austrian Federal Ministry of Labour and Economy (BMAW). In this project an interdisciplinary team is creating technological foundations for developing autonomous biodiversity monitoring systems (BMS) that are tested experimentally for ecosystem research, ecology, ecofaunistics, and environmental genetics. The goal is to develop and publish technical and conceptual standards for BMS, addressing the global challenge of biodiversity conservation and the need for reliable data to guide policies and management measures.</p></div>
			</div>

			<div type="references">

				<listBibl>

<biblStruct xml:id="b0">
	<analytic>
		<title level="a" type="main">Deep learning based detector yolov5 for identifying insect pests</title>
		<author>
			<persName><forename type="first">I</forename><surname>Ahmad</surname></persName>
		</author>
		<author>
			<persName><forename type="first">Y</forename><surname>Yang</surname></persName>
		</author>
		<author>
			<persName><forename type="first">Y</forename><surname>Yue</surname></persName>
		</author>
		<author>
			<persName><forename type="first">C</forename><surname>Ye</surname></persName>
		</author>
		<author>
			<persName><forename type="first">M</forename><surname>Hassan</surname></persName>
		</author>
		<author>
			<persName><forename type="first">X</forename><surname>Cheng</surname></persName>
		</author>
		<author>
			<persName><forename type="first">Y</forename><surname>Wu</surname></persName>
		</author>
		<author>
			<persName><forename type="first">Y</forename><surname>Zhang</surname></persName>
		</author>
	</analytic>
	<monogr>
		<title level="j">Applied Sciences</title>
		<imprint>
			<biblScope unit="volume">12</biblScope>
			<biblScope unit="page">10167</biblScope>
			<date type="published" when="2022">2022</date>
		</imprint>
	</monogr>
</biblStruct>

<biblStruct xml:id="b1">
	<analytic>
		<title level="a" type="main">Detection and classification of insects on stick-traps in a tomato crop using faster r-cnn</title>
		<author>
			<persName><forename type="first">A</forename><surname>Nieuwenhuizen</surname></persName>
		</author>
		<author>
			<persName><forename type="first">J</forename><surname>Hemming</surname></persName>
		</author>
		<author>
			<persName><forename type="first">H</forename><forename type="middle">K</forename><surname>Suh</surname></persName>
		</author>
	</analytic>
	<monogr>
		<title level="m">The Netherlands Conference on Computer Vision</title>
				<imprint>
			<date type="published" when="2018">2018</date>
		</imprint>
	</monogr>
</biblStruct>

<biblStruct xml:id="b2">
	<analytic>
		<title level="a" type="main">You only look once: Unified, real-time object detection</title>
		<author>
			<persName><forename type="first">J</forename><surname>Redmon</surname></persName>
		</author>
		<author>
			<persName><forename type="first">S</forename><surname>Divvala</surname></persName>
		</author>
		<author>
			<persName><forename type="first">R</forename><surname>Girshick</surname></persName>
		</author>
		<author>
			<persName><forename type="first">A</forename><surname>Farhadi</surname></persName>
		</author>
	</analytic>
	<monogr>
		<title level="m">Proceedings of the IEEE conference on computer vision and pattern recognition</title>
				<meeting>the IEEE conference on computer vision and pattern recognition</meeting>
		<imprint>
			<date type="published" when="2016">2016</date>
			<biblScope unit="page" from="779" to="788" />
		</imprint>
	</monogr>
</biblStruct>

<biblStruct xml:id="b3">
	<analytic>
		<title level="a" type="main">Moths complement bumblebee pollination of red clover: a case for day-and-night insect surveillance</title>
		<author>
			<persName><forename type="first">J</forename><surname>Alison</surname></persName>
		</author>
		<author>
			<persName><forename type="first">J</forename><forename type="middle">M</forename><surname>Alexander</surname></persName>
		</author>
		<author>
			<persName><forename type="first">N</forename><surname>Diaz Zeugin</surname></persName>
		</author>
		<author>
			<persName><forename type="first">Y</forename><forename type="middle">L</forename><surname>Dupont</surname></persName>
		</author>
		<author>
			<persName><forename type="first">E</forename><surname>Iseli</surname></persName>
		</author>
		<author>
			<persName><forename type="first">H</forename><forename type="middle">M</forename><surname>Mann</surname></persName>
		</author>
		<author>
			<persName><forename type="first">T</forename><forename type="middle">T</forename><surname>Høye</surname></persName>
		</author>
	</analytic>
	<monogr>
		<title level="j">Biology Letters</title>
		<imprint>
			<biblScope unit="volume">18</biblScope>
			<biblScope unit="page">20220187</biblScope>
			<date type="published" when="2022">2022</date>
		</imprint>
	</monogr>
</biblStruct>

				</listBibl>
			</div>
		</back>
	</text>
</TEI>
