font
Ayodeji, Opeyemi Abioye; Prior, Stephen; Thomas, Trevor; Saddington, Peter; Ramchurn, Sarvapali
Quantifying the effects of varying light-visibility and noise-sound levels in practical multimodal speech and visual gesture (mSVG) interaction with aerobots Proceedings Article
In: IEEE International Conference on Applied System Innovation (IEEE ICASI) 2018, pp. 842–845, IEEE, 2018.
Abstract | Links | BibTeX | Tags: Aerobot, mSVG (multimodal speech and visual gesture), nCA (navigation control autonomy), Speech
@inproceedings{soton418871,
  title     = {Quantifying the Effects of Varying Light-Visibility and Noise-Sound Levels in Practical Multimodal Speech and Visual Gesture ({mSVG}) Interaction with Aerobots},
  author    = {Ayodeji, Opeyemi Abioye and Prior, Stephen and Thomas, Trevor and Saddington, Peter and Ramchurn, Sarvapali},
  url       = {https://eprints.soton.ac.uk/418871/},
  year      = {2018},
  date      = {2018-06-01},
  booktitle = {{IEEE} International Conference on Applied System Innovation ({IEEE} {ICASI}) 2018},
  pages     = {842--845},
  publisher = {IEEE},
  abstract  = {This paper discusses the research work conducted to quantify the effective range of lighting levels and ambient noise levels in order to inform the design and development of a multimodal speech and visual gesture (mSVG) control interface for the control of a UAV. Noise level variation from 55 dB to 85 dB is observed under control lab conditions to determine where speech commands for a UAV fails, and to consider why, and possibly suggest a solution around this. Similarly, lighting levels are varied within the control lab condition to determine a range of effective visibility levels. The limitation of this work and some further work from this were also presented.},
  keywords  = {Aerobot, mSVG (multimodal speech and visual gesture), nCA (navigation control autonomy), Speech},
  pubstate  = {published},
  tppubtype = {inproceedings}
}
Ayodeji, Opeyemi Abioye; Prior, Stephen; Thomas, Trevor; Saddington, Peter; Ramchurn, Sarvapali
Multimodal human aerobotic interaction Book Section
In: Issa, Tomayess; Kommers, Piet; Issa, Theodora; Isaías, Pedro; Issa, Touma B. (Ed.): Smart Technology Applications in Business Environments, pp. 39–62, IGI Global, 2017.
Abstract | Links | BibTeX | Tags: Aerobot, Control, Gesture, hci, Interface, Multimodal, nCA, Unimodal
@incollection{soton406888b,
  title     = {Multimodal Human Aerobotic Interaction},
  author    = {Ayodeji, Opeyemi Abioye and Prior, Stephen and Thomas, Trevor and Saddington, Peter and Ramchurn, Sarvapali},
  editor    = {Issa, Tomayess and Kommers, Piet and Issa, Theodora and Isa{\'\i}as, Pedro and Issa, Touma B.},
  url       = {https://eprints.soton.ac.uk/406888/},
  year      = {2017},
  date      = {2017-03-01},
  booktitle = {Smart Technology Applications in Business Environments},
  pages     = {39--62},
  publisher = {IGI Global},
  abstract  = {This chapter discusses HCI interfaces used in controlling aerial robotic systems (otherwise known as aerobots). The autonomy control level of aerobot is also discussed. However, due to the limitations of existing models, a novel classification model of autonomy, specifically designed for multirotor aerial robots, called the navigation control autonomy (nCA) model is also developed. Unlike the existing models such as the AFRL and ONR, this model is presented in tiers and has a two-dimensional pyramidal structure. This model is able to identify the control void existing beyond tier-one autonomy components modes and to map the upper and lower limits of control interfaces. Two solutions are suggested for dealing with the existing control void and the limitations of the RC joystick controller -- the multimodal HHI-like interface and the unimodal BCI interface. In addition to these, some human factors based performance measurement is recommended, and the plans for further works presented.},
  keywords  = {Aerobot, Control, Gesture, hci, Interface, Multimodal, nCA, Unimodal},
  pubstate  = {published},
  tppubtype = {incollection}
}
Ayodeji, Opeyemi Abioye; Prior, Stephen; Thomas, Trevor; Saddington, Peter; Ramchurn, Sarvapali
Quantifying the effects of varying light-visibility and noise-sound levels in practical multimodal speech and visual gesture (mSVG) interaction with aerobots Proceedings Article
In: IEEE International Conference on Applied System Innovation (IEEE ICASI) 2018, pp. 842–845, IEEE, 2018.
@inproceedings{soton418871b,
  title     = {Quantifying the Effects of Varying Light-Visibility and Noise-Sound Levels in Practical Multimodal Speech and Visual Gesture ({mSVG}) Interaction with Aerobots},
  author    = {Ayodeji, Opeyemi Abioye and Prior, Stephen and Thomas, Trevor and Saddington, Peter and Ramchurn, Sarvapali},
  url       = {https://eprints.soton.ac.uk/418871/},
  year      = {2018},
  date      = {2018-06-01},
  booktitle = {{IEEE} International Conference on Applied System Innovation ({IEEE} {ICASI}) 2018},
  pages     = {842--845},
  publisher = {IEEE},
  abstract  = {This paper discusses the research work conducted to quantify the effective range of lighting levels and ambient noise levels in order to inform the design and development of a multimodal speech and visual gesture (mSVG) control interface for the control of a UAV. Noise level variation from 55 dB to 85 dB is observed under control lab conditions to determine where speech commands for a UAV fails, and to consider why, and possibly suggest a solution around this. Similarly, lighting levels are varied within the control lab condition to determine a range of effective visibility levels. The limitation of this work and some further work from this were also presented.},
  keywords  = {Aerobot, mSVG (multimodal speech and visual gesture), nCA (navigation control autonomy), Speech},
  pubstate  = {published},
  tppubtype = {inproceedings},
  internal-note = {Duplicate of soton418871; key suffixed to avoid BibTeX repeated-entry error. Consider removing.}
}
Ayodeji, Opeyemi Abioye; Prior, Stephen; Thomas, Trevor; Saddington, Peter; Ramchurn, Sarvapali
Multimodal human aerobotic interaction Book Section
In: Issa, Tomayess; Kommers, Piet; Issa, Theodora; Isaías, Pedro; Issa, Touma B. (Ed.): Smart Technology Applications in Business Environments, pp. 39–62, IGI Global, 2017.
@incollection{soton406888c,
  title     = {Multimodal Human Aerobotic Interaction},
  author    = {Ayodeji, Opeyemi Abioye and Prior, Stephen and Thomas, Trevor and Saddington, Peter and Ramchurn, Sarvapali},
  editor    = {Issa, Tomayess and Kommers, Piet and Issa, Theodora and Isa{\'\i}as, Pedro and Issa, Touma B.},
  url       = {https://eprints.soton.ac.uk/406888/},
  year      = {2017},
  date      = {2017-03-01},
  booktitle = {Smart Technology Applications in Business Environments},
  pages     = {39--62},
  publisher = {IGI Global},
  abstract  = {This chapter discusses HCI interfaces used in controlling aerial robotic systems (otherwise known as aerobots). The autonomy control level of aerobot is also discussed. However, due to the limitations of existing models, a novel classification model of autonomy, specifically designed for multirotor aerial robots, called the navigation control autonomy (nCA) model is also developed. Unlike the existing models such as the AFRL and ONR, this model is presented in tiers and has a two-dimensional pyramidal structure. This model is able to identify the control void existing beyond tier-one autonomy components modes and to map the upper and lower limits of control interfaces. Two solutions are suggested for dealing with the existing control void and the limitations of the RC joystick controller -- the multimodal HHI-like interface and the unimodal BCI interface. In addition to these, some human factors based performance measurement is recommended, and the plans for further works presented.},
  keywords  = {Aerobot, Control, Gesture, hci, Interface, Multimodal, nCA, Unimodal},
  pubstate  = {published},
  tppubtype = {incollection},
  internal-note = {Duplicate of soton406888b; key suffixed to avoid BibTeX repeated-entry error. Consider removing.}
}
Ayodeji, Opeyemi Abioye; Prior, Stephen; Thomas, Trevor; Saddington, Peter; Ramchurn, Sarvapali
Quantifying the effects of varying light-visibility and noise-sound levels in practical multimodal speech and visual gesture (mSVG) interaction with aerobots Proceedings Article
In: IEEE International Conference on Applied System Innovation (IEEE ICASI) 2018, pp. 842–845, IEEE, 2018.
Abstract | Links | BibTeX | Tags: Aerobot, mSVG (multimodal speech and visual gesture), nCA (navigation control autonomy), Speech
@inproceedings{soton418871c,
  title     = {Quantifying the Effects of Varying Light-Visibility and Noise-Sound Levels in Practical Multimodal Speech and Visual Gesture ({mSVG}) Interaction with Aerobots},
  author    = {Ayodeji, Opeyemi Abioye and Prior, Stephen and Thomas, Trevor and Saddington, Peter and Ramchurn, Sarvapali},
  url       = {https://eprints.soton.ac.uk/418871/},
  year      = {2018},
  date      = {2018-06-01},
  booktitle = {{IEEE} International Conference on Applied System Innovation ({IEEE} {ICASI}) 2018},
  pages     = {842--845},
  publisher = {IEEE},
  abstract  = {This paper discusses the research work conducted to quantify the effective range of lighting levels and ambient noise levels in order to inform the design and development of a multimodal speech and visual gesture (mSVG) control interface for the control of a UAV. Noise level variation from 55 dB to 85 dB is observed under control lab conditions to determine where speech commands for a UAV fails, and to consider why, and possibly suggest a solution around this. Similarly, lighting levels are varied within the control lab condition to determine a range of effective visibility levels. The limitation of this work and some further work from this were also presented.},
  keywords  = {Aerobot, mSVG (multimodal speech and visual gesture), nCA (navigation control autonomy), Speech},
  pubstate  = {published},
  tppubtype = {inproceedings},
  internal-note = {Duplicate of soton418871; key suffixed to avoid BibTeX repeated-entry error. Consider removing.}
}
Ayodeji, Opeyemi Abioye; Prior, Stephen; Thomas, Trevor; Saddington, Peter; Ramchurn, Sarvapali
Multimodal human aerobotic interaction Book Section
In: Issa, Tomayess; Kommers, Piet; Issa, Theodora; Isaías, Pedro; Issa, Touma B. (Ed.): Smart Technology Applications in Business Environments, pp. 39–62, IGI Global, 2017.
Abstract | Links | BibTeX | Tags: Aerobot, Control, Gesture, hci, Interface, Multimodal, nCA, Unimodal
@incollection{soton406888d,
  title     = {Multimodal Human Aerobotic Interaction},
  author    = {Ayodeji, Opeyemi Abioye and Prior, Stephen and Thomas, Trevor and Saddington, Peter and Ramchurn, Sarvapali},
  editor    = {Issa, Tomayess and Kommers, Piet and Issa, Theodora and Isa{\'\i}as, Pedro and Issa, Touma B.},
  url       = {https://eprints.soton.ac.uk/406888/},
  year      = {2017},
  date      = {2017-03-01},
  booktitle = {Smart Technology Applications in Business Environments},
  pages     = {39--62},
  publisher = {IGI Global},
  abstract  = {This chapter discusses HCI interfaces used in controlling aerial robotic systems (otherwise known as aerobots). The autonomy control level of aerobot is also discussed. However, due to the limitations of existing models, a novel classification model of autonomy, specifically designed for multirotor aerial robots, called the navigation control autonomy (nCA) model is also developed. Unlike the existing models such as the AFRL and ONR, this model is presented in tiers and has a two-dimensional pyramidal structure. This model is able to identify the control void existing beyond tier-one autonomy components modes and to map the upper and lower limits of control interfaces. Two solutions are suggested for dealing with the existing control void and the limitations of the RC joystick controller -- the multimodal HHI-like interface and the unimodal BCI interface. In addition to these, some human factors based performance measurement is recommended, and the plans for further works presented.},
  keywords  = {Aerobot, Control, Gesture, hci, Interface, Multimodal, nCA, Unimodal},
  pubstate  = {published},
  tppubtype = {incollection},
  internal-note = {Duplicate of soton406888b; key suffixed to avoid BibTeX repeated-entry error. Consider removing.}
}
Ayodeji, Opeyemi Abioye; Prior, Stephen; Thomas, Trevor; Saddington, Peter; Ramchurn, Sarvapali
Quantifying the effects of varying light-visibility and noise-sound levels in practical multimodal speech and visual gesture (mSVG) interaction with aerobots Proceedings Article
In: IEEE International Conference on Applied System Innovation (IEEE ICASI) 2018, pp. 842–845, IEEE, 2018.
@inproceedings{soton418871d,
  title     = {Quantifying the Effects of Varying Light-Visibility and Noise-Sound Levels in Practical Multimodal Speech and Visual Gesture ({mSVG}) Interaction with Aerobots},
  author    = {Ayodeji, Opeyemi Abioye and Prior, Stephen and Thomas, Trevor and Saddington, Peter and Ramchurn, Sarvapali},
  url       = {https://eprints.soton.ac.uk/418871/},
  year      = {2018},
  date      = {2018-06-01},
  booktitle = {{IEEE} International Conference on Applied System Innovation ({IEEE} {ICASI}) 2018},
  pages     = {842--845},
  publisher = {IEEE},
  abstract  = {This paper discusses the research work conducted to quantify the effective range of lighting levels and ambient noise levels in order to inform the design and development of a multimodal speech and visual gesture (mSVG) control interface for the control of a UAV. Noise level variation from 55 dB to 85 dB is observed under control lab conditions to determine where speech commands for a UAV fails, and to consider why, and possibly suggest a solution around this. Similarly, lighting levels are varied within the control lab condition to determine a range of effective visibility levels. The limitation of this work and some further work from this were also presented.},
  keywords  = {Aerobot, mSVG (multimodal speech and visual gesture), nCA (navigation control autonomy), Speech},
  pubstate  = {published},
  tppubtype = {inproceedings},
  internal-note = {Duplicate of soton418871; key suffixed to avoid BibTeX repeated-entry error. Consider removing.}
}
Ayodeji, Opeyemi Abioye; Prior, Stephen; Thomas, Trevor; Saddington, Peter; Ramchurn, Sarvapali
Multimodal human aerobotic interaction Book Section
In: Issa, Tomayess; Kommers, Piet; Issa, Theodora; Isaías, Pedro; Issa, Touma B. (Ed.): Smart Technology Applications in Business Environments, pp. 39–62, IGI Global, 2017.
@incollection{soton406888e,
  title     = {Multimodal Human Aerobotic Interaction},
  author    = {Ayodeji, Opeyemi Abioye and Prior, Stephen and Thomas, Trevor and Saddington, Peter and Ramchurn, Sarvapali},
  editor    = {Issa, Tomayess and Kommers, Piet and Issa, Theodora and Isa{\'\i}as, Pedro and Issa, Touma B.},
  url       = {https://eprints.soton.ac.uk/406888/},
  year      = {2017},
  date      = {2017-03-01},
  booktitle = {Smart Technology Applications in Business Environments},
  pages     = {39--62},
  publisher = {IGI Global},
  abstract  = {This chapter discusses HCI interfaces used in controlling aerial robotic systems (otherwise known as aerobots). The autonomy control level of aerobot is also discussed. However, due to the limitations of existing models, a novel classification model of autonomy, specifically designed for multirotor aerial robots, called the navigation control autonomy (nCA) model is also developed. Unlike the existing models such as the AFRL and ONR, this model is presented in tiers and has a two-dimensional pyramidal structure. This model is able to identify the control void existing beyond tier-one autonomy components modes and to map the upper and lower limits of control interfaces. Two solutions are suggested for dealing with the existing control void and the limitations of the RC joystick controller -- the multimodal HHI-like interface and the unimodal BCI interface. In addition to these, some human factors based performance measurement is recommended, and the plans for further works presented.},
  keywords  = {Aerobot, Control, Gesture, hci, Interface, Multimodal, nCA, Unimodal},
  pubstate  = {published},
  tppubtype = {incollection},
  internal-note = {Duplicate of soton406888b; key suffixed to avoid BibTeX repeated-entry error. Consider removing.}
}
Multi-agent signal-less intersection management with dynamic platoon formation
AI Foundation Models: initial review, CMA Consultation, TAS Hub Response
The effect of data visualisation quality and task density on human-swarm interaction
Demonstrating performance benefits of human-swarm teaming
Ayodeji, Opeyemi Abioye; Prior, Stephen; Thomas, Trevor; Saddington, Peter; Ramchurn, Sarvapali
Quantifying the effects of varying light-visibility and noise-sound levels in practical multimodal speech and visual gesture (mSVG) interaction with aerobots Proceedings Article
In: IEEE International Conference on Applied System Innovation (IEEE ICASI) 2018, pp. 842–845, IEEE, 2018.
@inproceedings{soton418871e,
  title     = {Quantifying the Effects of Varying Light-Visibility and Noise-Sound Levels in Practical Multimodal Speech and Visual Gesture ({mSVG}) Interaction with Aerobots},
  author    = {Ayodeji, Opeyemi Abioye and Prior, Stephen and Thomas, Trevor and Saddington, Peter and Ramchurn, Sarvapali},
  url       = {https://eprints.soton.ac.uk/418871/},
  year      = {2018},
  date      = {2018-06-01},
  booktitle = {{IEEE} International Conference on Applied System Innovation ({IEEE} {ICASI}) 2018},
  pages     = {842--845},
  publisher = {IEEE},
  abstract  = {This paper discusses the research work conducted to quantify the effective range of lighting levels and ambient noise levels in order to inform the design and development of a multimodal speech and visual gesture (mSVG) control interface for the control of a UAV. Noise level variation from 55 dB to 85 dB is observed under control lab conditions to determine where speech commands for a UAV fails, and to consider why, and possibly suggest a solution around this. Similarly, lighting levels are varied within the control lab condition to determine a range of effective visibility levels. The limitation of this work and some further work from this were also presented.},
  keywords  = {Aerobot, mSVG (multimodal speech and visual gesture), nCA (navigation control autonomy), Speech},
  pubstate  = {published},
  tppubtype = {inproceedings},
  internal-note = {Duplicate of soton418871; key suffixed to avoid BibTeX repeated-entry error. Consider removing.}
}
Ayodeji, Opeyemi Abioye; Prior, Stephen; Thomas, Trevor; Saddington, Peter; Ramchurn, Sarvapali
Multimodal human aerobotic interaction Book Section
In: Issa, Tomayess; Kommers, Piet; Issa, Theodora; Isaías, Pedro; Issa, Touma B. (Ed.): Smart Technology Applications in Business Environments, pp. 39–62, IGI Global, 2017.
@incollection{soton406888f,
  title     = {Multimodal Human Aerobotic Interaction},
  author    = {Ayodeji, Opeyemi Abioye and Prior, Stephen and Thomas, Trevor and Saddington, Peter and Ramchurn, Sarvapali},
  editor    = {Issa, Tomayess and Kommers, Piet and Issa, Theodora and Isa{\'\i}as, Pedro and Issa, Touma B.},
  url       = {https://eprints.soton.ac.uk/406888/},
  year      = {2017},
  date      = {2017-03-01},
  booktitle = {Smart Technology Applications in Business Environments},
  pages     = {39--62},
  publisher = {IGI Global},
  abstract  = {This chapter discusses HCI interfaces used in controlling aerial robotic systems (otherwise known as aerobots). The autonomy control level of aerobot is also discussed. However, due to the limitations of existing models, a novel classification model of autonomy, specifically designed for multirotor aerial robots, called the navigation control autonomy (nCA) model is also developed. Unlike the existing models such as the AFRL and ONR, this model is presented in tiers and has a two-dimensional pyramidal structure. This model is able to identify the control void existing beyond tier-one autonomy components modes and to map the upper and lower limits of control interfaces. Two solutions are suggested for dealing with the existing control void and the limitations of the RC joystick controller -- the multimodal HHI-like interface and the unimodal BCI interface. In addition to these, some human factors based performance measurement is recommended, and the plans for further works presented.},
  keywords  = {Aerobot, Control, Gesture, hci, Interface, Multimodal, nCA, Unimodal},
  pubstate  = {published},
  tppubtype = {incollection},
  internal-note = {Duplicate of soton406888b; key suffixed to avoid BibTeX repeated-entry error. Consider removing.}
}