<?xml version="1.0" encoding="UTF-8"?>
<TEI xml:space="preserve" xmlns="http://www.tei-c.org/ns/1.0" 
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" 
xsi:schemaLocation="http://www.tei-c.org/ns/1.0 https://raw.githubusercontent.com/kermitt2/grobid/master/grobid-home/schemas/xsd/Grobid.xsd"
 xmlns:xlink="http://www.w3.org/1999/xlink">
	<teiHeader xml:lang="en">
		<fileDesc>
			<titleStmt>
				<title level="a" type="main">Exploring multispectral reconstruction based on camera response prediction</title>
			</titleStmt>
			<publicationStmt>
				<publisher/>
				<availability status="unknown"><licence/></availability>
			</publicationStmt>
			<sourceDesc>
				<biblStruct>
					<analytic>
						<author>
							<persName><forename type="first">Jinxing</forename><surname>Liang</surname></persName>
							<email>jxliang@wtu.edu.cn</email>
							<affiliation key="aff0">
								<orgName type="department">School of Computer Science and Artificial Intelligence</orgName>
								<orgName type="institution">Wuhan Textile University</orgName>
								<address>
									<postCode>430200</postCode>
									<settlement>Hubei Wuhan</settlement>
									<country key="CN">China</country>
								</address>
							</affiliation>
							<affiliation key="aff1">
								<orgName type="department">Engineering Research Center of Hubei Province for Clothing Information</orgName>
								<orgName type="institution">Wuhan</orgName>
								<address>
									<postCode>430200</postCode>
									<settlement>Hubei</settlement>
									<country key="CN">China</country>
								</address>
							</affiliation>
							<affiliation key="aff3">
								<orgName type="department">School of Design</orgName>
								<orgName type="institution">University of Leeds</orgName>
								<address>
									<postCode>LS2 9JT</postCode>
									<settlement>Leeds</settlement>
									<country key="GB">UK</country>
								</address>
							</affiliation>
						</author>
						<author>
							<persName><forename type="first">Xinrong</forename><surname>Hu</surname></persName>
							<affiliation key="aff0">
								<orgName type="department">School of Computer Science and Artificial Intelligence</orgName>
								<orgName type="institution">Wuhan Textile University</orgName>
								<address>
									<postCode>430200</postCode>
									<settlement>Hubei Wuhan</settlement>
									<country key="CN">China</country>
								</address>
							</affiliation>
							<affiliation key="aff1">
								<orgName type="department">Engineering Research Center of Hubei Province for Clothing Information</orgName>
								<orgName type="institution">Wuhan</orgName>
								<address>
									<postCode>430200</postCode>
									<settlement>Hubei</settlement>
									<country key="CN">China</country>
								</address>
							</affiliation>
						</author>
						<author>
							<persName><forename type="first">Zhuan</forename><surname>Zuo</surname></persName>
							<affiliation key="aff0">
								<orgName type="department">School of Computer Science and Artificial Intelligence</orgName>
								<orgName type="institution">Wuhan Textile University</orgName>
								<address>
									<postCode>430200</postCode>
									<settlement>Hubei Wuhan</settlement>
									<country key="CN">China</country>
								</address>
							</affiliation>
						</author>
						<author>
							<persName><forename type="first">Xiao</forename><surname>Liu</surname></persName>
							<affiliation key="aff2">
								<orgName type="institution">Wuhan Geomatics Institute</orgName>
								<address>
									<postCode>430022</postCode>
									<settlement>Hubei Wuhan</settlement>
									<country key="CN">China</country>
								</address>
							</affiliation>
						</author>
						<author>
							<persName><forename type="first">Yifan</forename><surname>Li</surname></persName>
							<affiliation key="aff0">
								<orgName type="department">School of Computer Science and Artificial Intelligence</orgName>
								<orgName type="institution">Wuhan Textile University</orgName>
								<address>
									<postCode>430200</postCode>
									<settlement>Hubei Wuhan</settlement>
									<country key="CN">China</country>
								</address>
							</affiliation>
						</author>
						<author>
							<persName><forename type="first">Wensen</forename><surname>Zhou</surname></persName>
							<affiliation key="aff0">
								<orgName type="department">School of Computer Science and Artificial Intelligence</orgName>
								<orgName type="institution">Wuhan Textile University</orgName>
								<address>
									<postCode>430200</postCode>
									<settlement>Hubei Wuhan</settlement>
									<country key="CN">China</country>
								</address>
							</affiliation>
						</author>
						<author>
							<persName><forename type="first">Hang</forename><surname>Luo</surname></persName>
							<affiliation key="aff0">
								<orgName type="department">School of Computer Science and Artificial Intelligence</orgName>
								<orgName type="institution">Wuhan Textile University</orgName>
								<address>
									<postCode>430200</postCode>
									<settlement>Hubei Wuhan</settlement>
									<country key="CN">China</country>
								</address>
							</affiliation>
							<affiliation key="aff1">
								<orgName type="department">Engineering Research Center of Hubei Province for Clothing Information</orgName>
								<orgName type="institution">Wuhan</orgName>
								<address>
									<postCode>430200</postCode>
									<settlement>Hubei</settlement>
									<country key="CN">China</country>
								</address>
							</affiliation>
						</author>
						<author>
							<persName><forename type="first">Kaida</forename><surname>Xiao</surname></persName>
							<email>k.xiao1@leeds.ac.uk</email>
							<affiliation key="aff3">
								<orgName type="department">School of Design</orgName>
								<orgName type="institution">University of Leeds</orgName>
								<address>
									<postCode>LS2 9JT</postCode>
									<settlement>Leeds</settlement>
									<country key="GB">UK</country>
								</address>
							</affiliation>
						</author>
						<title level="a" type="main">Exploring multispectral reconstruction based on camera response prediction</title>
					</analytic>
					<monogr>
						<idno type="ISSN">1613-0073</idno>
					</monogr>
					<idno type="MD5">EE4A6C6350A3620A2B4A29EBBD6E719F</idno>
				</biblStruct>
			</sourceDesc>
		</fileDesc>
		<encodingDesc>
			<appInfo>
				<application version="0.7.2" ident="GROBID" when="2025-04-23T16:58+0000">
					<desc>GROBID - A machine learning software for extracting information from scholarly documents</desc>
					<ref target="https://github.com/kermitt2/grobid"/>
				</application>
			</appInfo>
		</encodingDesc>
		<profileDesc>
			<textClass>
				<keywords>
					<term>spectral reconstruction</term>
					<term>digital Camera</term>
					<term>imaging model</term>
					<term>raw response prediction</term>
				</keywords>
			</textClass>
			<abstract>
<div xmlns="http://www.tei-c.org/ns/1.0"><p>Spectral reconstruction based on digital imaging has become an important way for obtaining spectral images with high spatial-resolution. Current research has made great achievements in the laboratory; however, dealing with rapidly changing light sources, illumination, and imaging parameters in an open environment presents significant challenges for spectral reconstruction. This is because a spectral reconstruction model established under one set of imaging conditions is not suitable for use under different imaging conditions. To deal with the challenges, in this study, we explored the feasibility of spectral reconstruction based on camera raw response prediction. In the proposed method, the camera raw response of the training dataset under specific imaging conditions is first predicted via the camera imaging model, then the spectral reconstruction algorithm is applied to spectrally characterize the digital camera based on the training dataset, at last, the spectral reflectance of the testing target is reconstructed from the captured image under the same imaging condition. The performance of the proposed method (M2) is tested and compared with the traditional way (M1) which uses the captured training dataset to reconstruct the captured target. Results show the performance of M2 is slightly inferior to M1 but it still achieves relatively good reconstruction accuracy. In addition, we find the proposed method is sensitive to the spectral reconstruction algorithms used in it, and different algorithms have different performances in spectral and chromaticity aspects.</p></div>
			</abstract>
		</profileDesc>
	</teiHeader>
	<text xml:lang="en">
		<body>
<div xmlns="http://www.tei-c.org/ns/1.0"><head n="1.">Introduction</head><p>Spectral reflectance is not only the 'fingerprint' of color but also an important feature to describe the physical properties of objects; therefore, it has been widely used in the field of high-fidelity color reproduction and material analysis. Multispectral reconstruction is one of the important techniques to acquire spectral images with high-spatial resolutions <ref type="bibr" target="#b0">[1,</ref><ref type="bibr" target="#b1">2,</ref><ref type="bibr" target="#b2">3,</ref><ref type="bibr" target="#b3">4]</ref>; it can overcome the application limitation of spectrophotometers that can only perform single-point measurements. Also, compared with multispectral cameras, it can further improve the spatial resolution of spectral images and reduce the cost of hardware systems.</p><p>Multispectral reconstruction has made significant progress in the laboratory. However, due to the sensitivity of the spectral reconstruction model to changes in imaging conditions <ref type="bibr" target="#b0">[1]</ref>, it still faces many challenges when applied in open environments. As shown in Figure <ref type="figure" target="#fig_0">1</ref>, when a spectral reconstruction matrix developed under the CIED65 is applied under the CIEA, the reconstructed spectral root-mean-square error (RMSE) increased from 2.87% to 41.32%, indicating that spectral reconstruction is sensitive to changes in light sources. In addition, changes in the illuminance and imaging parameters, such as exposure time and ISO, can cause variations in imaging conditions. Therefore, when dealing with an open environment where imaging conditions are constantly changing, it is crucial to develop a new spectral reconstruction method. In this study, we explored the feasibility of spectral reconstruction based on camera raw response prediction. 
In the proposed method, the camera raw response of the training dataset under specific imaging conditions is first predicted using the camera imaging model <ref type="bibr" target="#b11">[12,</ref><ref type="bibr" target="#b12">13,</ref><ref type="bibr" target="#b13">14]</ref>, and then the spectral reconstruction algorithm is applied to spectrally characterize the digital camera based on the training dataset. At last, the spectral reflectance of the target object is reconstructed from the captured image under the same imaging condition. Theoretically speaking, the proposed method has the potential to achieve real-time spectral characterization of the camera and facilitate the practical applications of multispectral reconstruction technology in diverse open environments. Research results indicate that the proposed method (M2) can achieve satisfactory accuracy across different exposure and ISO settings, yet there remains room for improvement when compared to the result that uses captured training samples to reconstruct the captured target (M1).</p></div>
<div xmlns="http://www.tei-c.org/ns/1.0"><head n="2.">Camera raw response prediction</head><p>For digital cameras, it is a complex workflow from radiant energy from scene to final visual pleased images <ref type="bibr" target="#b11">[12,</ref><ref type="bibr" target="#b12">13]</ref>. In this study, we research on camera raw response prediction-based multispectral reconstruction, as the raw response is linearized data and easier to predict than the post-processed digital values <ref type="bibr" target="#b13">[14]</ref>. The prediction model for linearized camera raw response is described below.</p><p>First, the radiation spectrum of the light source is irradiated on the surface of the object. After absorption and reflection by the object, the radiation spectrum image of the object is obtained, as shown in Equation ( <ref type="formula" target="#formula_0">1</ref>):</p><formula xml:id="formula_0">(, , ) = (, , )() ,<label>(1)</label></formula><p>where is the wavelength, (, , ) is the spectral reflectance of the pixels in the image, () denotes the relative spectral radiance of the light source, and (, , ) denotes the spectral radiance at each pixel. Subsequently, the radiation spectrum from the scene, after passing through the camera lens, forms an optical irradiance image before it enters the sensor <ref type="bibr" target="#b12">[13]</ref>. This can be expressed as shown in Equation ( <ref type="formula" target="#formula_1">2</ref>):</p><formula xml:id="formula_1">, , ≅ 1 + 4 /# 2 (1 + ) 2 , , ,<label>(2)</label></formula><p>where /# is the f-number of the lens, is the magnification of the lens, is the transmittance of the lens, (, , ) is the spectral radiance at each pixel, and , , denotes the irradiance at each pixel. Furthermore, the scene irradiance image enters the sensor, which undergoes photoelectric conversion and analog-to-digital conversion to obtain a mosaic image in the Bayer pattern. 
The demosaicing algorithm is then applied to get the raw format image of the scene captured by the camera. These specific steps are shown in Equations ( <ref type="formula" target="#formula_2">3</ref>) to <ref type="bibr" target="#b5">(6)</ref>.</p><formula xml:id="formula_2">, = , , ,<label>(3)</label></formula><p>where Ω represents the spectral wavelength range, which is taken from 400 to 700nm in this study, is the camera spectral sensitivity function, (, ) represents the photoelectrons conversion efficiency at position (, ). Therefore, the total photo-electric conversion (, ) by the camera sensor over a given exposure time can be defined as:</p><formula xml:id="formula_3">(, ) = (, ) ,<label>(4)</label></formula><p>where represents the exposure time, is the sensitivity function, which can usually be obtained by dividing the ISO value by 100. When the spectral radiance and the sensor's quantum efficiency remain constant during a single exposure, it can be further simplified to the product form as shown in Equation ( <ref type="formula" target="#formula_4">5</ref>):</p><formula xml:id="formula_4">(, ) = (, ),<label>(5)</label></formula><p>According to Equation (3) and Equation ( <ref type="formula" target="#formula_4">5</ref>), the raw response value at any pixel in the image can be expressed in the form shown in Equation ( <ref type="formula" target="#formula_5">6</ref>):</p><formula xml:id="formula_5">(, ) = 1 + 4 /# 2 (1 + ) 2 , , ,<label>(6)</label></formula><p>Finally, considering the nonlinear relationship between the irradiance of the camera sensor and the camera's readout response <ref type="bibr" target="#b13">[14]</ref>, the model further introduces three constants, 1 , 2 , and to characterize the non-equivalence between exposure and ISO, thus yielding the final expression for the raw response (, ) of a pixel.</p><formula xml:id="formula_6">(, ) = [( 1 + 4 /# 2 (1 + ) 2 , , + 1 ) + 2 ],<label>(7)</label></formula><p>This study uses the camera imaging 
model shown in Equation <ref type="bibr" target="#b6">(7)</ref> to predict the color card raw response under set imaging conditions to support the multispectral reconstruction research based on camera raw response prediction.</p></div>
<div xmlns="http://www.tei-c.org/ns/1.0"><head n="3.">Experimental</head><p>In the experiment, we used the ColorChecker SG140 (CCSG) and ColorChecker 24 (CC) color charts as training samples and testing target respectively. The color charts were captured in a closed and nearly uniformly illuminated lightbox with a Nikon D7200 digital camera, keeping the color charts located at the center of the camera's field of view (as shown in Figure <ref type="figure" target="#fig_1">2</ref>). The color chart plane was approximately one meter away from the camera sensor plane, and the focal length was set as 35 mm. Five different combinations of exposure and ISO were set to capture the color charts. The five settings of exposure and ISO are shown in Table <ref type="table" target="#tab_0">1</ref>. For each setting, the white patches in the color charts were not overexposed, ensuring the validity of the data. The camera sensitivity functions of Nikon D7200 were estimated by Jiang's method and CC color chart <ref type="bibr" target="#b14">[15,</ref><ref type="bibr" target="#b15">16]</ref>, where the CC color chart is used to implement the camera sensitivity estimation method of Jiang et al <ref type="bibr" target="#b14">[15]</ref>, and the result is plotted in Figure <ref type="figure" target="#fig_2">3</ref>(a). The spectral power distribution of the light source was measured using the EVERFINE spectroradiometer of SPIC-300AW, and the spectral power distribution of the light source in the lightbox is plotted in Figure <ref type="figure" target="#fig_2">3(b)</ref>. The spectral reflectance of the color charts was measured using the spectrophotometer of X-rite i1-Pro3. With the spectral reflectance, the spectral sensitivity function, the spectral power distribution, and the imaging setting of each group, we can predict the camera raw response of CCSG using the prediction model described in section 2. 
Furthermore, the nonlinear parameters were estimated using the proposed curve fitting methods in reference <ref type="bibr" target="#b13">[14]</ref>, as shown in Equation ( <ref type="formula" target="#formula_7">8</ref>):</p><formula xml:id="formula_7">= 0 + 1 + 2 ,<label>(8)</label></formula><p>where represents the captured camera raw response of the color chart, 0 represents the predicted camera raw response using the prediction model, and is the sensitivity function as described in Equation ( <ref type="formula" target="#formula_2">3</ref>). The captured camera raw response is extracted with the help of Dcraw and a self-developed GUI interface in Matlab. In the spectral reconstruction stage, two types and four spectral algorithms of Liang, OLS, Cao, and Kernel are selected in the experiment <ref type="bibr" target="#b16">[17,</ref><ref type="bibr" target="#b17">18,</ref><ref type="bibr" target="#b18">19,</ref><ref type="bibr" target="#b19">20]</ref>, where the former two methods are regression-based and the latter two are interpolation-based. We use 'M1' to represent the situation of using captured training dataset to reconstruct the captured testing target, and use 'M2' to represent the situation of using predicted training dataset to reconstruct the captured testing target. The training dataset in this study is the CCSG color chart, and the testing target is the CC color chart.</p><p>In addition, we use CIEDE2000 color difference (∆E00) to evaluate the accuracy of camera raw response prediction, and use the root-mean-square error (RMSE) and CIEDE2000 color difference (∆E00) to evaluate the accuracy of spectral reconstruction. 
The method to calculate the RMSE is in Equation ( <ref type="formula" target="#formula_8">9</ref>):</p><formula xml:id="formula_8">RMSE = 1 1 − 2 1 − 2 ,<label>(9)</label></formula><p>where 1 denotes the measured spectral reflectance using the spectrophotometer, 2 represents the reconstructed spectral reflectance, is the transpose operator, and is the number of sampled wavelengths, which is 31 in this study.</p></div>
<div xmlns="http://www.tei-c.org/ns/1.0"><head n="4.">Result and analysis</head></div>
<div xmlns="http://www.tei-c.org/ns/1.0"><head n="4.1.">Camera raw response prediction</head><p>Under the setting imaging conditions, we first check the camera raw response prediction results for each group of settings. The color difference of CIEDE2000 (∆E00) between the predicted and captured camera raw response for CCSG and CC are calculated and summarized in Table <ref type="table">2</ref>, respectively. It should be noted that since there is a certain degree of amplitude deviation between the predicted raw response of the color chart and the captured raw response, the max-min normalization is performed on the predicted raw response and the captured raw response before calculating the color difference ∆E00.</p></div>
<div xmlns="http://www.tei-c.org/ns/1.0"><head>Table 2</head><p>The color difference of CIEDE2000 between the predicted and captured camera raw response for CCSG and CC under five group of settings.</p></div>
<div xmlns="http://www.tei-c.org/ns/1.0"><head>∆E00</head><p>CCSG It can be seen from Table <ref type="table">2</ref> that for the color chart CCSG, the average color difference ∆E00 for camera raw response prediction under five groups of settings is 1.81, and except for Group 5, the prediction ∆E00 for each group of settings is around the average color difference. The predicted difference between Group 5 and the other 4 groups may be caused by the nonequivalence caused by the combination of exposure and ISO. For the color chart CC, the predicted color difference is bigger than the CCSG, which is around the mean ∆E00 of 3.22 of the five groups. The reason behind this may be that the parameters estimated in the prediction model are carried out with the CCSG color chart. In order to more intuitively compare the predicted raw response and the captured raw response, the distribution of the predicted and captured raw responses is plotted in Figure <ref type="figure" target="#fig_3">4</ref>. The results from Figure <ref type="figure" target="#fig_4">5</ref> indicate that the distribution of the predicted camera raw responses for both color charts of CCSG and CC closely overlaps with the distribution of the captured raw responses with minor deviations observed in certain individual colors, which means relatively good prediction results of the proposed method.</p></div>
<div xmlns="http://www.tei-c.org/ns/1.0"><head n="4.2.">Response prediction-based spectral reconstruction</head><p>In this section, we present the results of camera raw response prediction-based spectral reconstruction. Both the camera raw response of CCSG and CC color charts have been predicted under different imaging settings to check the prediction accuracy. But for prediction-based spectral reconstruction, using the CCSG as a training dataset and the CC as a testing target, we compared the spectral reconstruction accuracy in two different situations by four different spectral reconstruction algorithms of two types. In the first situation (M1), we use the captured raw response of CCSG to reconstruct the spectral reflectance of captured raw CC. In the second situation (M2), we use the predicted raw response of CCSG to reconstruct the spectral reflectance of captured raw CC. The selected spectral reconstruction methods are Liang, OLS, Cao, and Kernel <ref type="bibr" target="#b16">[17,</ref><ref type="bibr" target="#b17">18,</ref><ref type="bibr" target="#b18">19,</ref><ref type="bibr" target="#b19">20]</ref>. The spectral reconstruction accuracy is evaluated by RMSE and ∆E00, respectively. The results of M1 and M2 under five different groups of imaging settings are summarized in Table <ref type="table" target="#tab_2">3 and Table 4</ref>, respectively. And the box plots of spectral reconstruction errors in Table <ref type="table">3</ref> and Table <ref type="table" target="#tab_2">4</ref> are plotted in Figure <ref type="figure" target="#fig_4">5</ref>.</p></div>
<div xmlns="http://www.tei-c.org/ns/1.0"><head>Table 3</head><p>The average spectral reconstruction errors of M1 with four algorithms under five different groups of imaging settings. From Table <ref type="table">3</ref>, Table <ref type="table" target="#tab_2">4</ref>, and Figure <ref type="figure" target="#fig_4">5</ref>, it can be observed that for using the captured training dataset to reconstruct the captured testing target (M1), the spectral reconstruction errors are relatively good according to the experience. However, there are certain differences in spectral reconstruction errors between different algorithms. Among them, Liang's method generally shows the best spectral reconstruction for both RMSE and ∆E00, and Cao's method shows the worst colorimetric accuracy. For using the predicted training dataset to reconstruct the captured testing target (M2), Liang's method shows the highest RMSE of 5.73%, while Cao's method shows the lowest RMSE of 4.02%, but for color difference, the results of the two methods are completely opposite, where Liang's method shows the lowest ∆E00 of 3.53, while Cao's method shows the highest ∆E00 of 5.30.</p><p>The results in Table <ref type="table">3</ref>, Table <ref type="table" target="#tab_2">4</ref>, and Figure <ref type="figure" target="#fig_4">5</ref> illustrated that the performance of M2 is slightly inferior to M1, but it still achieves relatively good reconstruction accuracy. In addition, it is easy to infer that the proposed camera raw response prediction based method is sensitive to the spectral reconstruction algorithms used in it, and different algorithms have different performances in spectral and chromaticity aspects. To further inspect the reconstructed spectral results, several spectral reflectance curves reconstructed by Cao's method under M1 and M2 are compared with the measured results in Figure <ref type="figure" target="#fig_5">6</ref>. 
The results in Figure <ref type="figure" target="#fig_5">6</ref> show that the spectral reflectance reconstructed by Cao's method under M1 and M2 overlaps and closely matches the measured data. This demonstrates the potential of the proposed method to address the dependency of spectral reconstruction techniques on imaging conditions. Overall, the experimental results in this study highlight the potential of the proposed method to achieve real-time spectral characterization of the camera and facilitate practical applications of multispectral reconstruction technology in open environments.</p></div>
<div xmlns="http://www.tei-c.org/ns/1.0"><head n="5.">Conclusions</head><p>This study introduces a novel approach to spectral reconstruction based on camera raw response prediction to address the challenges posed by rapidly changing light sources, illumination, and imaging parameters in an open environment. The method involves predicting the camera raw response of the training dataset based on the digital imaging model, characterizing the camera spectrally, and reconstructing the spectral reflectance of the testing target using the predicted response and spectral reflectance of the training dataset. The proposed method differs from existing research by correcting imaging conditions from an open environment to a laboratory via illumination estimation. This method is more flexible for practical applications. The experimental results have proven the effectiveness of this method. In future research, the validation of the proposed method under different open environments will be conducted to further investigate the feasibility of its performance. In addition, we will conduct further optimization research on the proposed method and develop an integrated application system.</p></div><figure xmlns="http://www.tei-c.org/ns/1.0" xml:id="fig_0"><head>Figure 1 :</head><label>1</label><figDesc>Figure 1: An example of spectral reconstruction being sensitive to changes in imaging conditions of light source. In recent years, researchers have been exploring ways to address the challenges of applying multispectral reconstruction in open environments. Shrestha et al. used a binocular camera and estimated the spectral power distribution of the light source by adding a broadband filter in front of one of the lenses. They then used this information to characterize the camera and reconstruct the spectral reflectance of the target [5]. Inspired by the color constancy theory, Khan et al. proposed the spectral adaptation transformation (SAT) for multispectral constancy [6, 7]. 
Finlayson and Liang et al. found that existing spectral reconstruction algorithms are sensitive to exposure level changes, leading to deviations of reconstructed spectral curve shape from the ground truth, and proposed the solutions [1, 8, 9, 10, 11]. Although the above research has achieved preliminary results, there is still a certain gap between theory study and practical applications in open environments.In this study, we explored the feasibility of spectral reconstruction based on camera raw response prediction. In the proposed method, the camera raw response of the training dataset under specific imaging conditions is first predicted using the camera imaging model<ref type="bibr" target="#b11">[12,</ref><ref type="bibr" target="#b12">13,</ref><ref type="bibr" target="#b13">14]</ref>, and then the spectral reconstruction algorithm is applied to spectral characterize the digital camera based on the training dataset. At last, the spectral reflectance of the target object is reconstructed from the captured image under the same imaging condition. Theoretically speaking, the proposed method has the potential to achieve real-time spectral characterization of the camera and facilitate the practical applications of multispectral reconstruction technology in diverse open environments. Research results indicate that the proposed method (M2) can achieve satisfactory accuracy across different exposure and ISO settings, yet</figDesc><graphic coords="2,87.36,119.04,425.16,289.44" type="bitmap" /></figure>
<figure xmlns="http://www.tei-c.org/ns/1.0" xml:id="fig_1"><head>Figure 2 :</head><label>2</label><figDesc>Figure 2: Captured images of the ColorChecker SG140 color chart (left) and ColorChecker 24 color chart (right) under setting exposure 1/25s and ISO 100.</figDesc><graphic coords="4,299.95,394.20,209.45,143.35" type="bitmap" /></figure>
<figure xmlns="http://www.tei-c.org/ns/1.0" xml:id="fig_2"><head>Figure 3 :</head><label>3</label><figDesc>Figure 3: (a) The spectral sensitivity function distribution of Nikon D7200, and (b) the relative spectral power distribution of light source in lightbox.</figDesc><graphic coords="5,101.28,267.36,198.48,154.44" type="bitmap" /></figure>
<figure xmlns="http://www.tei-c.org/ns/1.0" xml:id="fig_3"><head>Figure 4 :</head><label>4</label><figDesc>Figure 4: The distribution of the predicted and captured raw responses, (a) the predicted and captured raw responses of CCSG, (b) the predicted and captured raw responses of CC.</figDesc><graphic coords="6,101.28,545.40,198.48,154.44" type="bitmap" /></figure>
<figure xmlns="http://www.tei-c.org/ns/1.0" xml:id="fig_4"><head>Figure 5 :</head><label>5</label><figDesc>Figure 5: Box plots of the spectral reconstruction errors, (a) RMSE of M1 with four algorithms, (b) ∆E00 of M1 with four algorithms, (c) RMSE of M2 with four algorithms, (d) ∆E00 of M2 with four algorithms.</figDesc><graphic coords="8,101.28,217.32,198.48,154.44" type="bitmap" /></figure>
<figure xmlns="http://www.tei-c.org/ns/1.0" xml:id="fig_5"><head>Figure 6 :</head><label>6</label><figDesc>Figure 6: Compared the measured and several reconstructed spectral reflectance by Cao's method under M1 and M2: (a) 11 th color patch in CC, (b) 16 th color patch in CC, (c) 18 th color patch in CC, (d) 19 th color patch in CC.</figDesc><graphic coords="9,101.28,217.32,198.48,154.44" type="bitmap" /></figure>
<figure xmlns="http://www.tei-c.org/ns/1.0" type="table" xml:id="tab_0"><head>Table 1</head><label>1</label><figDesc>Five groups of exposure settings in the experiment</figDesc><table><row><cell>Setting</cell><cell>Exposure/s</cell><cell>ISO</cell></row><row><cell>Group 1</cell><cell>1/25</cell><cell>100</cell></row><row><cell>Group 2</cell><cell>1/30</cell><cell>200</cell></row><row><cell>Group 3</cell><cell>1/60</cell><cell>400</cell></row><row><cell>Group 4</cell><cell>1/125</cell><cell>800</cell></row><row><cell>Group 5</cell><cell>1/250</cell><cell>1600</cell></row></table></figure>
<figure xmlns="http://www.tei-c.org/ns/1.0" type="table" xml:id="tab_2"><head>Table 4</head><label>4</label><figDesc>The average spectral reconstruction errors of M2 with four algorithms under five different groups of imaging settings.</figDesc><table><row><cell></cell><cell>Liang</cell><cell></cell><cell>OLS</cell><cell></cell><cell>Cao</cell><cell></cell><cell>Kernel</cell><cell></cell></row><row><cell></cell><cell>RMSE(%)</cell><cell cols="2">∆E00 RMSE(%)</cell><cell>∆E00</cell><cell>RMSE(%)</cell><cell>∆E00</cell><cell>RMSE(%)</cell><cell>∆E00</cell></row><row><cell>Group 1</cell><cell>2.30</cell><cell>2.17</cell><cell>3.04</cell><cell>2.27</cell><cell>2.97</cell><cell>4.39</cell><cell>3.23</cell><cell>2.50</cell></row><row><cell>Group 2</cell><cell>2.47</cell><cell>2.49</cell><cell>3.19</cell><cell>2.52</cell><cell>2.93</cell><cell>4.46</cell><cell>3.30</cell><cell>2.67</cell></row><row><cell>Group 3</cell><cell>2.38</cell><cell>2.20</cell><cell>3.01</cell><cell>2.29</cell><cell>2.63</cell><cell>3.78</cell><cell>3.14</cell><cell>2.46</cell></row><row><cell>Group 4</cell><cell>2.75</cell><cell>2.76</cell><cell>3.54</cell><cell>2.84</cell><cell>3.06</cell><cell>4.59</cell><cell>3.68</cell><cell>3.00</cell></row><row><cell>Group 5</cell><cell>3.24</cell><cell>3.18</cell><cell>3.84</cell><cell>3.2</cell><cell>3.51</cell><cell>4.93</cell><cell>4.04</cell><cell>3.34</cell></row><row><cell>Average</cell><cell>2.63</cell><cell>2.56</cell><cell>3.32</cell><cell>2.62</cell><cell>3.02</cell><cell>4.43</cell><cell>3.48</cell><cell>2.79</cell></row><row><cell></cell><cell>Liang</cell><cell></cell><cell>OLS</cell><cell></cell><cell>Cao</cell><cell></cell><cell>Kernel</cell><cell></cell></row><row><cell></cell><cell>RMSE(%)</cell><cell cols="2">∆E00 RMSE(%)</cell><cell>∆E00</cell><cell>RMSE(%)</cell><cell>∆E00</cell><cell>RMSE(%)</cell><cell>∆E00</cell></row><row><cell>Group 
1</cell><cell>5.44</cell><cell>3.35</cell><cell>5.26</cell><cell>3.54</cell><cell>3.83</cell><cell>5.07</cell><cell>4.73</cell><cell>3.39</cell></row><row><cell>Group 2</cell><cell>5.68</cell><cell>3.46</cell><cell>5.41</cell><cell>3.65</cell><cell>3.97</cell><cell>5.40</cell><cell>4.88</cell><cell>3.49</cell></row><row><cell>Group 3</cell><cell>5.53</cell><cell>3.36</cell><cell>5.24</cell><cell>3.55</cell><cell>3.83</cell><cell>5.15</cell><cell>4.70</cell><cell>3.39</cell></row><row><cell>Group 4</cell><cell>6.71</cell><cell>4.21</cell><cell>6.54</cell><cell>4.38</cell><cell>5.03</cell><cell>6.44</cell><cell>5.96</cell><cell>4.17</cell></row><row><cell>Group 5</cell><cell>5.29</cell><cell>3.25</cell><cell>4.93</cell><cell>3.42</cell><cell>3.45</cell><cell>4.46</cell><cell>4.42</cell><cell>3.31</cell></row><row><cell>Average</cell><cell>5.73</cell><cell>3.53</cell><cell>5.48</cell><cell>3.71</cell><cell>4.02</cell><cell>5.30</cell><cell>4.94</cell><cell>3.55</cell></row></table></figure>
		</body>
		<back>

			<div type="acknowledgement">
<div xmlns="http://www.tei-c.org/ns/1.0"><head>Acknowledgements</head><p>This work was supported by National Natural Science Foundation of China (62305255), Hubei Provincial Natural Science Foundation General Project (No.2022CFB537), Hubei Provincial Department of Education Science and Technology Research Program Youth Talent (No.Q20221706), and China Scholarship Council (202308420128).</p></div>
			</div>

			<div type="references">

				<listBibl>

<biblStruct xml:id="b0">
	<analytic>
		<title level="a" type="main">Research on the deep learning-based exposure invariant spectral reconstruction method</title>
		<author>
			<persName><forename type="first">J</forename><surname>Liang</surname></persName>
		</author>
		<author>
			<persName><forename type="first">L</forename><surname>Xin</surname></persName>
		</author>
		<author>
			<persName><forename type="first">Z</forename><surname>Zuo</surname></persName>
		</author>
	</analytic>
	<monogr>
		<title level="j">J. Frontiers in Neuroscience</title>
		<imprint>
			<biblScope unit="volume">16</biblScope>
			<biblScope unit="page">1031546</biblScope>
			<date type="published" when="2022">2022</date>
		</imprint>
	</monogr>
</biblStruct>

<biblStruct xml:id="b1">
	<analytic>
		<title level="a" type="main">Hierarchical regression network for spectral reconstruction from RGB images</title>
		<author>
			<persName><forename type="first">Y</forename><surname>Zhao</surname></persName>
		</author>
		<author>
			<persName><forename type="first">L</forename><forename type="middle">M</forename><surname>Po</surname></persName>
		</author>
		<author>
			<persName><forename type="first">Q</forename><surname>Yan</surname></persName>
		</author>
	</analytic>
	<monogr>
		<title level="m">Proceedings of the IEEE/CVF Conference on Computer Vision and Pattern Recognition Workshops</title>
				<meeting>the IEEE/CVF Conference on Computer Vision and Pattern Recognition Workshops</meeting>
		<imprint>
			<date type="published" when="2020">2020</date>
			<biblScope unit="page" from="422" to="423" />
		</imprint>
	</monogr>
</biblStruct>

<biblStruct xml:id="b2">
	<analytic>
		<title level="a" type="main">Mst++: Multi-stage spectral-wise transformer for efficient spectral reconstruction</title>
		<author>
			<persName><forename type="first">Y</forename><surname>Cai</surname></persName>
		</author>
		<author>
			<persName><forename type="first">J</forename><surname>Lin</surname></persName>
		</author>
		<author>
			<persName><forename type="first">Z</forename><surname>Lin</surname></persName>
		</author>
	</analytic>
	<monogr>
		<title level="m">Proceedings of the IEEE/CVF Conference on Computer Vision and Pattern Recognition</title>
				<meeting>the IEEE/CVF Conference on Computer Vision and Pattern Recognition</meeting>
		<imprint>
			<date type="published" when="2022">2022</date>
			<biblScope unit="page" from="745" to="755" />
		</imprint>
	</monogr>
</biblStruct>

<biblStruct xml:id="b3">
	<analytic>
		<title level="a" type="main">On the optimization of regression-based spectral reconstruction</title>
		<author>
			<persName><forename type="first">Yi-Tun</forename><surname>Lin</surname></persName>
		</author>
		<author>
			<persName><forename type="first">Graham</forename><forename type="middle">D</forename><surname>Finlayson</surname></persName>
		</author>
	</analytic>
	<monogr>
		<title level="j">J. Sensors</title>
		<imprint>
			<biblScope unit="volume">21</biblScope>
			<biblScope unit="page">5586</biblScope>
			<date type="published" when="2021">2021</date>
		</imprint>
	</monogr>
</biblStruct>

<biblStruct xml:id="b4">
	<analytic>
		<title level="a" type="main">Spectrogenic imaging: A novel approach to multispectral imaging in an uncontrolled environment</title>
		<author>
			<persName><forename type="first">R</forename><surname>Shrestha</surname></persName>
		</author>
		<author>
			<persName><forename type="first">J</forename><surname>Hardeberg</surname></persName>
		</author>
	</analytic>
	<monogr>
		<title level="j">J. Optics Express</title>
		<imprint>
			<biblScope unit="volume">22</biblScope>
			<biblScope unit="issue">8</biblScope>
			<biblScope unit="page" from="9123" to="9133" />
			<date type="published" when="2014">2014</date>
		</imprint>
	</monogr>
</biblStruct>

<biblStruct xml:id="b5">
	<analytic>
		<title level="a" type="main">Illuminant estimation in multispectral imaging</title>
		<author>
			<persName><forename type="first">H</forename><forename type="middle">A</forename><surname>Khan</surname></persName>
		</author>
		<author>
			<persName><forename type="first">J B</forename><surname>Thomas</surname></persName>
		</author>
		<author>
			<persName><forename type="first">J Y</forename><surname>Hardeberg</surname></persName>
		</author>
	</analytic>
	<monogr>
		<title level="j">J. JOSA</title>
		<imprint>
			<biblScope unit="volume">34</biblScope>
			<biblScope unit="issue">7</biblScope>
			<biblScope unit="page" from="1085" to="1098" />
			<date type="published" when="2017">2017</date>
		</imprint>
	</monogr>
</biblStruct>

<biblStruct xml:id="b6">
	<analytic>
		<title level="a" type="main">Multispectral camera as spatiospectrophotometer under uncontrolled illumination</title>
		<author>
			<persName><forename type="first">H</forename><forename type="middle">A</forename><surname>Khan</surname></persName>
		</author>
		<author>
			<persName><forename type="first">J B</forename><surname>Thomas</surname></persName>
		</author>
		<author>
			<persName><forename type="first">J Y</forename><surname>Hardeberg</surname></persName>
		</author>
	</analytic>
	<monogr>
		<title level="j">J. Optics Express</title>
		<imprint>
			<biblScope unit="volume">27</biblScope>
			<biblScope unit="issue">2</biblScope>
			<biblScope unit="page" from="1051" to="1070" />
			<date type="published" when="2019">2019</date>
		</imprint>
	</monogr>
</biblStruct>

<biblStruct xml:id="b7">
	<analytic>
		<title level="a" type="main">Exposure invariance in spectral reconstruction from rgb images</title>
		<author>
			<persName><forename type="first">Y T</forename><surname>Lin</surname></persName>
		</author>
		<author>
			<persName><forename type="first">G</forename><surname>Finlayson</surname></persName>
		</author>
	</analytic>
	<monogr>
		<title level="m">Society for Imaging Science and Technology</title>
				<imprint>
			<date type="published" when="2019">2019</date>
			<biblScope unit="page" from="284" to="289" />
		</imprint>
	</monogr>
	<note>Color and Imaging Conference</note>
</biblStruct>

<biblStruct xml:id="b8">
	<analytic>
		<title level="a" type="main">Physically plausible spectral reconstruction from RGB images</title>
		<author>
			<persName><forename type="first">Y T</forename><surname>Lin</surname></persName>
		</author>
		<author>
			<persName><forename type="first">G</forename><surname>Finlayson</surname></persName>
		</author>
	</analytic>
	<monogr>
		<title level="m">Proceedings of the IEEE/CVF Conference on Computer Vision and Pattern Recognition Workshops</title>
				<meeting>the IEEE/CVF Conference on Computer Vision and Pattern Recognition Workshops</meeting>
		<imprint>
			<date type="published" when="2020">2020</date>
			<biblScope unit="page" from="532" to="533" />
		</imprint>
	</monogr>
</biblStruct>

<biblStruct xml:id="b9">
	<analytic>
		<title level="a" type="main">A spectral invariant representation of spectral reflectance</title>
		<author>
			<persName><forename type="first">A</forename><surname>Ibrahim</surname></persName>
		</author>
		<author>
			<persName><forename type="first">S</forename><surname>Tominaga</surname></persName>
		</author>
		<author>
			<persName><forename type="first">T</forename><surname>Horiuchi</surname></persName>
		</author>
	</analytic>
	<monogr>
		<title level="j">J. Optical Review</title>
		<imprint>
			<biblScope unit="volume">18</biblScope>
			<biblScope unit="issue">2</biblScope>
			<biblScope unit="page" from="231" to="236" />
			<date type="published" when="2011">2011</date>
		</imprint>
	</monogr>
</biblStruct>

<biblStruct xml:id="b10">
	<analytic>
		<title level="a" type="main">Adaptive Weighted Spectral Reconstruction Method Against Exposure Variation</title>
		<author>
			<persName><forename type="first">J</forename><surname>Liang</surname></persName>
		</author>
		<author>
			<persName><forename type="first">L</forename><surname>Xin</surname></persName>
		</author>
		<author>
			<persName><forename type="first">J</forename><surname>Cheng</surname></persName>
		</author>
	</analytic>
	<monogr>
		<title level="j">J. Spectroscopy and spectral analysis</title>
		<imprint>
			<biblScope unit="volume">43</biblScope>
			<biblScope unit="page" from="3330" to="3338" />
			<date type="published" when="2023">2023</date>
		</imprint>
	</monogr>
</biblStruct>

<biblStruct xml:id="b11">
	<analytic>
		<title level="a" type="main">Color image processing pipeline</title>
		<author>
			<persName><forename type="first">R</forename><surname>Ramanath</surname></persName>
		</author>
		<author>
			<persName><forename type="first">W</forename><forename type="middle">E</forename><surname>Snyder</surname></persName>
		</author>
		<author>
			<persName><forename type="first">Y</forename><surname>Yoo</surname></persName>
		</author>
	</analytic>
	<monogr>
		<title level="j">J. IEEE Signal Processing Magazine</title>
		<imprint>
			<biblScope unit="volume">22</biblScope>
			<biblScope unit="issue">1</biblScope>
			<biblScope unit="page" from="34" to="43" />
			<date type="published" when="2005">2005</date>
		</imprint>
	</monogr>
</biblStruct>

<biblStruct xml:id="b12">
	<analytic>
		<title level="a" type="main">Digital camera simulation</title>
		<author>
			<persName><forename type="first">J</forename><forename type="middle">E</forename><surname>Farrell</surname></persName>
		</author>
		<author>
			<persName><forename type="first">P</forename><forename type="middle">B</forename><surname>Catrysse</surname></persName>
		</author>
		<author>
			<persName><forename type="first">B</forename><surname>Wandell</surname></persName>
		</author>
	</analytic>
	<monogr>
		<title level="j">J. Applied Optics</title>
		<imprint>
			<biblScope unit="volume">51</biblScope>
			<biblScope unit="issue">4</biblScope>
			<biblScope unit="page" from="A80" to="A90" />
			<date type="published" when="2012">2012</date>
		</imprint>
	</monogr>
</biblStruct>

<biblStruct xml:id="b13">
	<analytic>
		<title level="a" type="main">Camera response prediction for various capture settings using the spectral sensitivity and crosstalk model</title>
		<author>
			<persName><forename type="first">J</forename><surname>Qiu</surname></persName>
		</author>
		<author>
			<persName><forename type="first">H</forename><surname>Xu</surname></persName>
		</author>
	</analytic>
	<monogr>
		<title level="j">J. Applied Optics</title>
		<imprint>
			<biblScope unit="volume">55</biblScope>
			<biblScope unit="page" from="6989" to="6999" />
			<date type="published" when="2016">2016</date>
		</imprint>
	</monogr>
</biblStruct>

<biblStruct xml:id="b14">
	<analytic>
		<title level="a" type="main">What is the space of spectral sensitivity functions for digital color cameras?</title>
		<author>
			<persName><forename type="first">J</forename><surname>Jiang</surname></persName>
		</author>
		<author>
			<persName><forename type="first">D</forename><surname>Liu</surname></persName>
		</author>
		<author>
			<persName><forename type="first">J</forename><surname>Gu</surname></persName>
		</author>
	</analytic>
	<monogr>
		<title level="m">IEEE Workshop on Applications of Computer Vision (WACV)</title>
				<imprint>
			<date type="published" when="2013">2013. 2013</date>
			<biblScope unit="page" from="168" to="179" />
		</imprint>
	</monogr>
</biblStruct>

<biblStruct xml:id="b15">
	<analytic>
		<title level="a" type="main">Measurement and estimation of spectral sensitivity functions for mobile phone cameras</title>
		<author>
			<persName><forename type="first">S</forename><surname>Tominaga</surname></persName>
		</author>
		<author>
			<persName><forename type="first">N</forename><surname>Shogo</surname></persName>
		</author>
		<author>
			<persName><forename type="first">O</forename><surname>Ryo</surname></persName>
		</author>
	</analytic>
	<monogr>
		<title level="j">Sensors</title>
		<imprint>
			<biblScope unit="volume">21</biblScope>
			<biblScope unit="page">4985</biblScope>
			<date type="published" when="2021">2021</date>
		</imprint>
	</monogr>
</biblStruct>

<biblStruct xml:id="b16">
	<analytic>
		<title level="a" type="main">Optimized method for spectral reflectance reconstruction from camera responses</title>
		<author>
			<persName><forename type="first">J</forename><surname>Liang</surname></persName>
		</author>
		<author>
			<persName><forename type="first">X</forename><surname>Wan</surname></persName>
		</author>
	</analytic>
	<monogr>
		<title level="j">J. Optics Express</title>
		<imprint>
			<biblScope unit="volume">25</biblScope>
			<biblScope unit="page" from="28273" to="28287" />
			<date type="published" when="2017">2017</date>
		</imprint>
	</monogr>
</biblStruct>

<biblStruct xml:id="b17">
	<analytic>
		<title level="a" type="main">Spectral recovery using polynomial models</title>
		<author>
			<persName><forename type="first">D</forename><forename type="middle">R</forename><surname>Connah</surname></persName>
		</author>
		<author>
			<persName><forename type="first">J</forename><surname>Hardeberg</surname></persName>
		</author>
	</analytic>
	<monogr>
		<title level="m">Color Imaging X: Processing, Hardcopy, and Applications. SPIE</title>
				<imprint>
			<date type="published" when="2005">2005</date>
			<biblScope unit="volume">5667</biblScope>
			<biblScope unit="page" from="65" to="75" />
		</imprint>
	</monogr>
</biblStruct>

<biblStruct xml:id="b18">
	<analytic>
		<title level="a" type="main">Spectral reflectance reconstruction from RGB images based on weighting smaller color difference group</title>
		<author>
			<persName><forename type="first">B</forename><surname>Cao</surname></persName>
		</author>
		<author>
			<persName><forename type="first">N</forename><surname>Liao</surname></persName>
		</author>
		<author>
			<persName><forename type="first">H</forename><surname>Cheng</surname></persName>
		</author>
	</analytic>
	<monogr>
		<title level="j">J. Color Research &amp; Application</title>
		<imprint>
			<biblScope unit="volume">42</biblScope>
			<biblScope unit="issue">3</biblScope>
			<biblScope unit="page" from="327" to="332" />
			<date type="published" when="2017">2017</date>
		</imprint>
	</monogr>
</biblStruct>

<biblStruct xml:id="b19">
	<analytic>
		<title level="a" type="main">Self-training-based spectral image reconstruction for art paintings with multispectral imaging</title>
		<author>
			<persName><forename type="first">P</forename><surname>Xu</surname></persName>
		</author>
		<author>
			<persName><forename type="first">H</forename><surname>Xu</surname></persName>
		</author>
		<author>
			<persName><forename type="first">C</forename><surname>Diao</surname></persName>
		</author>
	</analytic>
	<monogr>
		<title level="j">J. Applied optics</title>
		<imprint>
			<biblScope unit="volume">56</biblScope>
			<biblScope unit="page" from="8461" to="8470" />
			<date type="published" when="2017">2017</date>
		</imprint>
	</monogr>
</biblStruct>

				</listBibl>
			</div>
		</back>
	</text>
</TEI>
