<?xml version="1.0" encoding="utf-8"?>
<!DOCTYPE article
  PUBLIC "-//NLM//DTD JATS (Z39.96) Journal Publishing DTD v1.1 20151215//EN" "https://jats.nlm.nih.gov/publishing/1.1/JATS-journalpublishing1.dtd">
<article article-type="research-article" dtd-version="1.1" specific-use="sps-1.8" xml:lang="en" xmlns:mml="http://www.w3.org/1998/Math/MathML" xmlns:xlink="http://www.w3.org/1999/xlink">
	<front>
		<journal-meta>
			<journal-id journal-id-type="publisher-id">avances</journal-id>
			<journal-title-group>
				<journal-title>Avances: Investigación en Ingeniería</journal-title>
				<abbrev-journal-title abbrev-type="publisher">Avances</abbrev-journal-title>
			</journal-title-group>
			<issn publication-format="print">1794-4953</issn>
            <issn publication-format="electronic">2619-6581</issn>
			<publisher>
				<publisher-name>Universidad Libre</publisher-name>
			</publisher>
		</journal-meta>
		<article-meta>
			<article-id pub-id-type="doi">10.18041/1794-4953/avances.1.5545</article-id>
			<article-categories>
				<subj-group subj-group-type="heading">
					<subject>Artículos</subject>
				</subj-group>
			</article-categories>
			<title-group>
				<article-title>The initial process of creating a guide to evaluate the usability in Virtual Learning Environments</article-title>
				<trans-title-group xml:lang="es">
					<trans-title>Proceso inicial de construcción de una guía para la evaluación de la usabilidad en entornos virtuales de aprendizaje</trans-title>
				</trans-title-group>
			</title-group>
            <contrib-group>
				<contrib contrib-type="author" corresp="yes">
                    <contrib-id contrib-id-type="orcid">https://orcid.org/0000-0002-0454-350X</contrib-id>
					<name>
						<surname>Pinto-Corredor</surname>
						<given-names>Juan David</given-names>
					</name>                    
                    <email>juandavidpinto4444@gmail.com</email>
                    <xref ref-type="aff" rid="aff1"><sup>1</sup></xref>
				</contrib>
				<contrib contrib-type="author" corresp="yes">
                    <contrib-id contrib-id-type="orcid">https://orcid.org/0000-0003-0870-6895</contrib-id>
                    <name>
						<surname>Agredo Delgado</surname>
						<given-names>Vanessa</given-names>
					</name>
                    <email>vagredo@unicomfacauca.edu.co</email>
                    <xref ref-type="aff" rid="aff2"><sup>2</sup></xref>
				</contrib>
                <contrib contrib-type="author" corresp="yes">
                    <contrib-id contrib-id-type="orcid">https://orcid.org/0000-0003-0870-6895</contrib-id>
                    <name>
						<surname>Ruiz</surname>
						<given-names>Pablo H.</given-names>
					</name>
                    <email>pruiz@unicomfacauca.edu.co</email>
                    <xref ref-type="aff" rid="aff3"><sup>3</sup></xref>
				</contrib>
                <contrib contrib-type="author" corresp="yes">
                    <contrib-id contrib-id-type="orcid">https://orcid.org/0000-0003-0870-6895</contrib-id>
                    <name>
						<surname>Collazos</surname>
						<given-names>Cesar A.</given-names>
					</name>
                    <email>cccollazo@unicauca.edu.co</email>
                    <xref ref-type="aff" rid="aff4"><sup>4</sup></xref>
				</contrib>
			</contrib-group>
            <aff id="aff1">
                <label>1</label>
                <institution content-type="original">Universidad del Cauca, Popayán, Colombia</institution>
                <institution content-type="normalized">Universidad del Cauca</institution>
                <institution content-type="orgname">Universidad del Cauca</institution>
                <addr-line>
                    <city>Popayán</city>
                </addr-line>
                <country country="CO">Colombia</country>
            </aff>
            <aff id="aff2">
                <label>2</label>
                <institution content-type="original">Corporación Universitaria Comfacauca – Unicomfacauca, Popayán, Colombia</institution>
                <institution content-type="normalized">Corporación Universitaria Comfacauca</institution>
                <institution content-type="orgname">Corporación Universitaria Comfacauca</institution>
                <addr-line>
                    <city>Popayán</city>
				</addr-line>
                <country country="CO">Colombia</country>
            </aff>
            <aff id="aff3">
                <label>3</label>
                <institution content-type="original">Corporación Universitaria Comfacauca – Unicomfacauca, Popayán, Colombia</institution>
                <institution content-type="normalized">Corporación Universitaria Comfacauca</institution>
                <institution content-type="orgname">Corporación Universitaria Comfacauca</institution>
                <addr-line>
                    <city>Popayán</city>
				</addr-line>
                <country country="CO">Colombia</country>
            </aff>
            <aff id="aff4">
                <label>4</label>
                <institution content-type="original">Universidad del Cauca, Popayán, Colombia</institution>
                <institution content-type="normalized">Universidad del Cauca</institution>
                <institution content-type="orgname">Universidad del Cauca</institution>
                <addr-line>
                    <city>Popayán</city>
				</addr-line>
                <country country="CO">Colombia</country>
            </aff>
			<pub-date pub-type="collection">
				<season>Jan-Jun</season>
				<year>2021</year>
			</pub-date>
			<volume>18</volume>
			<issue>1</issue>
			<fpage>1</fpage>
			<lpage>13</lpage>
			<history>
				<date date-type="received">
					<day>01</day>
					<month>10</month>
					<year>2020</year>
				</date>
				<date date-type="accepted">
					<day>01</day>
					<month>01</month>
					<year>2021</year>
				</date>
			</history>
			<permissions>
				<license license-type="open-access" xlink:href="https://creativecommons.org/licenses/by-nc-nd/4.0/" xml:lang="es">
					<license-p>Esta obra está bajo una licencia internacional Creative Commons Atribución-NoComercial-SinDerivadas 4.0</license-p>
				</license>
			</permissions>
			<abstract>
			<title>ABSTRACT</title>
				<p>The usability evaluation includes a set method to analyze the system quality used in different development life cycle stages, for which there is a wide variety of evaluation methods (EM), each method uses certain ways and techniques to measure several aspects. Its choice does not only depend on which answer you are looking for but on multiple factors. The existing EMs are appropriate to assess the Virtual Learning Environments (VLE) usability due to the lack of combination methods or specific evaluation methods for this type of software. This paper focuses on showing the initial process before the creation of the guide from the EM combination that allows the usability evaluation in VLE. The process was carried out, initially, with a bibliographic analysis about the EM usability of existing interactive systems and a comparison among them, where the first combination version was obtained to address the object of study and to select the useful methods in order to assess usability in this context. The result of the application of these metrics will be the combination of EM usability to form the VLE evaluation guide in upcoming research.</p>
			</abstract>
			<trans-abstract xml:lang="es">
			<title>RESUMEN</title>
			    <p>La evaluación de usabilidad incluye un conjunto de métodos para analizar la calidad de uso del sistema en diferentes etapas del ciclo de vida del desarrollo, para lo cual existe una amplia variedad de métodos de evaluación (ME), cada método usa ciertas formas y técnicas para medir diferentes aspectos. Su elección no solo depende de qué respuesta se está buscando, sino de múltiples factores. El problema surge cuando se busca cuál de los ME existentes son apropiados para evaluar la usabilidad de los entornos virtuales de aprendizaje (EVA), considerando que no hay métodos combinados o métodos de evaluación específicos para este tipo de software para obtener una evaluación completa, consistente y considerar factores tales como efectividad, eficiencia, satisfacción, tiempos razonables, entre otros. Es por eso que surge la siguiente pregunta: ¿Qué combinación de ME de usabilidad son apropiadas para aplicar en EVAs? Este artículo se centra en mostrar el proceso inicial antes de la construcción de la guía obtenida de la combinación de ME que permitan la evaluación de usabilidad en EVAs. Proceso que se realizó, inicialmente, con un análisis bibliográfico sobre los ME de usabilidad de sistemas interactivos existentes y una comparación entre ellos, donde se obtuvo la primera versión de la combinación, con esta versión, se definió un conjunto de métricas que se aplicarán al EVA objeto de estudio y permitirá seleccionar los métodos útiles para evaluar la usabilidad en este contexto. El resultado de la aplicación de estas métricas será la combinación de ME de usabilidad para formar la guía de evaluación de EVA en próximas investigaciones.</p>
			</trans-abstract>
            <kwd-group xml:lang="en">
			    <title>Keywords</title>
				<kwd>Education</kwd>
                <kwd>Virtual Learning Environments</kwd>
                <kwd>Usability</kwd>
                <kwd>User Experience</kwd>
                <kwd>Usability Evaluation Methods</kwd>
			</kwd-group>
			<kwd-group xml:lang="es">
				<title>Palabras clave</title>
				<kwd>Educación</kwd>
                <kwd>Entornos virtuales de aprendizaje</kwd>
                <kwd>Usabilidad</kwd>
                <kwd>Experiencia del usuario</kwd>
                <kwd>Métodos de evaluación de usabilidad</kwd>
			</kwd-group>
            <counts>
				<fig-count count="0"/>
				<table-count count="3"/>
				<equation-count count="0"/>
				<ref-count count="32"/>
				<page-count count="13"/>
			</counts>
		</article-meta>
	</front>
	<body>
		<sec sec-type="intro">
		    <title>1.	Introduction</title>
            <p>Due to the great Internet growth in recent decades, online education has become a great alternative to traditional education. In the same way, educational institutions use available technologies and advances to provide more information to a growing audience. While the online education system proposals and modalities are growing, the number of people who use them is growing, too. So, it is necessary to consider the diversity in people's needs and characteristics to design Virtual Learning Environments (VLE) [<xref  ref-type="bibr" rid="r1">1</xref>]. In this way, it contributes to design and builds online education systems; so, people can use them in a simple, effective, and efficient way that might provide a positive user experience. The increasing number of publics using them is growing; for this reason, the User Experience (UX) is a fundamental part of the success of the VLE [<xref  ref-type="bibr" rid="r2">2</xref>]. The UX refers to "how people feel about a product and its satisfaction when they use it, look at it, sustain it, open it or close it" [<xref  ref-type="bibr" rid="r2">2</xref>] The UX covers different aspects related to the software product quality such as accessibility, emotionality, usability, among others [<xref  ref-type="bibr" rid="r3">3</xref>]. In this sense, the current research focuses exclusively on the UX "usability" feature concerning the "ease of learning", (which is defined as the time that a user - who has never seen an interface- can learn to use it well and perform basic operations, how much does it take a typical community user to learn how to use relevant commands from a set of tasks? [<xref  ref-type="bibr" rid="r4">4</xref>]), specifically, in the VLE usability study.</p>
            <p>Also, the usability evaluation has been determined as the activity comprising a method set that analyzes the interactive system quality use in different development life cycle stages [<xref  ref-type="bibr" rid="r5">5</xref>]. It is necessary to perform the usability evaluation to validate if the final product meets the requirements and is easy to use. The evaluation’s main objectives are; to evaluate the system functionality scope and accessibility in order to evaluate the user’s experience in his/her interaction and to identify specific problems [<xref  ref-type="bibr" rid="r6">6</xref>], These are the objectives that will be sought when evaluating a VLE with a combination method guide. To perform the usability evaluation, there are different Usability Assessment Methods (UAM), which depend on variables such as costs, time availability, and human resources among others [<xref  ref-type="bibr" rid="r7">7</xref>]. In this way, choosing methods to evaluate VLE usability is not an easy task [<xref  ref-type="bibr" rid="r8">8</xref>]. A series of UAM can be applied on a VLE, but the concern is related to how precise the information is given and at any combination of it. Similarly, there is no standardization regarding what, how, and when to perform the usability evaluation, but methods have been developed and used in an isolation way and with specific criteria to evaluate a particular product [<xref  ref-type="bibr" rid="r9">9</xref>]. The usability assessment methods have strengths and weaknesses, they are focused on evaluating certain aspects or usability requirements, too. So, it is advisable to combine them in  evaluation to complement each other in terms of their strengths and to cover a greater number of evaluation aspects [<xref  ref-type="bibr" rid="r10">10</xref>]. 
The selection and evaluation methods combination will depend on financial and time constraints, the development cycle phases, and the development nature system [<xref  ref-type="bibr" rid="r11">11</xref>]. </p>
            <p>Based on that, the problem arises when deciding which of the existing evaluation methods or combination is appropriate to evaluate the VLE usability. Therefore, the evaluation is completely and consistently carried out, getting concrete results on its usability, considering factors such as effectiveness, efficiency, satisfaction, reasonable timing, among other factors [<xref  ref-type="bibr" rid="r11">11</xref>]. For this reason, the following research question emerges: Which of the existing UAMs are appropriate to apply in Virtual Learning Environments? That is why this paper focuses on the study about a set of methods for evaluating usability on VLE. These methods, after being selected, characterized and analyzed, will constitute a new combination method to evaluate the VLE usability, which can provide more complete and integral usability information, regarding the performance of the evaluation methods indiscriminately and independently. This paper is structured as follows: Section 2 shows a theoretical context to contextualize relevant research topics, section 3 contains some related works to usability evaluation in VLE. In section 4, the process to make the first UAM combination version is shown, and in section 5 the conclusions and future work are described.</p>
        </sec>
        <sec>
            <title>2.	Theoretical context</title>
            <p>Important theoretical references about the guide development process for usability evaluation in Virtual Learning Environments are outlined below:</p>
            <sec>
                <title>2.1.	Virtual Learning Environments (VLE)</title>
                <p>The VLEs are part of the computer set of applications designed for educational online purposes, which aim to achieve the educational objectives by providing tools that facilitate the user and course management, communication processes, evaluation, collaboration, and content distribution [<xref  ref-type="bibr" rid="r11">11</xref>]. They present a functionality series to facilitate the teaching and learning processes that can unfold through a software tool according to each specific context need [<xref  ref-type="bibr" rid="r12">12</xref>].</p>
            </sec>
            <sec>
                <title>2.2.	User experience (UX)</title>
                <p>The term User Experience (UX) refers to "how people feel about a product and their satisfaction when they use it, look at it, sustain it, open it or close it" [<xref  ref-type="bibr" rid="r2">2</xref>]. There are different UX definitions used by professionals in the HCI (Human-Computer Interaction) area, one of the most outstanding is the ISO 9241-210 standard definition [<xref  ref-type="bibr" rid="r13">13</xref>], "Perceptions and person’s responses resulting from the usage of the product, system, or service usage".  UX covers different aspects related to software product quality. The ISO/IEC 25010 standard [<xref  ref-type="bibr" rid="r13">13</xref>] considers in a general way the following UX aspects: accessibility, dependability, emotivity, playability, usability, among others.</p>
            </sec>
            <sec>
                <title>2.3.	Usability</title>
                <p>The term usability, in general, is defined as the ease of use, whether it is a web page, a computer application, or another system that interacts with a user [<xref  ref-type="bibr" rid="r15">15</xref>]. Being one of the most important web applications quality features like reliability and security [<xref  ref-type="bibr" rid="r14">14</xref>]. It determines the user’s satisfaction when interacting with the system. The usability system and its constant improvements lead to a significant increase in the user's experience quality evaluation.</p>
            </sec>
            <sec>
                <title>2.4.	Usability evaluation</title>
                <p>The usability evaluation has been determined as the activity comprising a set of methods that analyzes the quality use of an interactive system, at different development life cycle stages [<xref  ref-type="bibr" rid="r15">15</xref>]. It is necessary to perform the usability evaluation to validate that the final product meets the requirements and is usable [<xref  ref-type="bibr" rid="r7">7</xref>]. Usability evaluation is a fundamental part of the software development iterative approach because evaluation activities can produce design solutions to be applied in the following development cycle or, at least, greater knowledge about the nature of the detected interaction problem [<xref  ref-type="bibr" rid="r9">9</xref>].</p>
            </sec>
            <sec>
                <title>2.5.	Usability Assessment Methods (UAM)</title>
                <p>The UAMs have become an interesting study source by the usability researchers, their application characteristics, the existing methods variety, and the generated results [<xref  ref-type="bibr" rid="r15">15</xref>]. They allow usability characteristics evaluation such as the ease of learning, the ease and efficiency of usage, the ease to remember how it works, the frequency, and error severity [<xref  ref-type="bibr" rid="r16">16</xref>].</p>
            </sec>
        </sec>
        <sec>
            <title>3.	Related works</title>
            <p>Below, some related works are presented to justify the need to do the research presented in this paper (also analyzed in the paper [<xref  ref-type="bibr" rid="r17">17</xref>]). Otaiza in [<xref  ref-type="bibr" rid="r18">18</xref>], presents a study in which the UAMs have been studied in transactional web applications, contrasting their characteristics, and generating a methodological evaluation proposal to obtain the largest amount of relevant information regarding the usability of these kinds of applications.</p>
            <p>In the same way [<xref  ref-type="bibr" rid="r19">19</xref>], they examine e-learning usability evaluation methods, compare them, and propose criteria set that should be consulted when choosing the appropriate method to evaluate the e-learning systems usability. The research shows that none of the examined methods has allowed integral e-learning platforms usability evaluation and none of them addresses all relevant specific topics for the learning systems and modules.</p>
            <p>In [<xref  ref-type="bibr" rid="r15">15</xref>], methodological criteria are presented to evaluate the Course Management Systems’ (CMS) usability. The evaluation was carried out by combining different methods and instruments with the potential platform users: a group of teachers and language students. Traditional usability evaluation methods were used, they were mixed and some new ones were originated to evaluate not only the elements that make up the usability but also the functionality and the pedagogical aspect of the CMS.</p>
            <p>In [<xref  ref-type="bibr" rid="r20">20</xref>], a model to evaluate the VLE quality is proposed, considering usability as the central axis. The model is called MUSA, because it is a model based on usability, and is oriented to evaluate products in use. The general ideas are based on a four-level strategy or evaluation layers, which start from the general to reach the particular, where the usability definitions among attributes and heuristics form the core model.</p>
            <p>Similarly, [<xref  ref-type="bibr" rid="r21">21</xref>] is a research that is focused on analyzing the virtual learning environment usability for undergraduate university students, emphasizing psycho-pedagogical aspects that allow evaluating both, the quality contents and the system that contains them. The students' frustration implications in their cognitive process are analyzed, establishing this emotion as an immediate bad interface designing consequence.</p>
        </sec>
        <sec>
            <title>4.	Guide on construction activities for the usability evaluation in virtual learning environments</title>
            <sec>
                <title>4.1.	Research Methodology</title>
                <p>This work is developed following a research methodology based on multi-cycle action with bifurcation [<xref  ref-type="bibr" rid="r22">22</xref>]. The strategy starts with an initial research cycle where three problems are identified: conceptual, methodological, and evaluation. This allows us to divide the work into three research cycles: conceptual cycle, methodological cycle, and evaluation cycle.</p>
                <sec>
                    <title>4.1.1.	Conceptual cycle</title>
                    <p>In this cycle, a contextual analysis is carried out to find the problem to be studied, this cycle has three research phases: A literature study about the virtual learning environments, user experience, usability, usability evaluation, and characteristics, attributes, or elements that are related to the usability assessment methods are analyzed from an extensive literature review. The second phase is the appropriate UAMs for executing in VLE identification, where, from the literature study, a possible set of methods are established to make up the proposed combination in this research, and finally, the activities, resources, and the phase of assigning a person in charge for each of the selected methods.</p>
                    <p>Besides, the reviewed and analyzed methods in the literature and the possibly formed combination of UAM in VLE are made and analyzed regarding what is presented in the project “Usability integration in software development process framework [<xref  ref-type="bibr" rid="r3">3</xref>]".</p>
                    <p>As evaluation methods and according to the research carried out by Muñoz et al. [<xref  ref-type="bibr" rid="r23">23</xref>], the three main groups were analyzed: inspection, inquiry, and testing. The methods of each group were compared with each other, this in order to determine their advantages and disadvantages, analyzing the most relevant characteristics (development stage where should the method be used, place of performance, if the method generates quantitative data, if it can be done remotely, the time it takes to carry it out, the number of required evaluators, the number of users required to execute it, if it is possible to do it automatically, and if the method analyzes the usability characteristics such as intelligibility, learning, operability, errors, esthetic, accessibility), considering the following related works [<xref  ref-type="bibr" rid="r24">24</xref>], [<xref  ref-type="bibr" rid="r18">18</xref>], [<xref  ref-type="bibr" rid="r19">19</xref>], [<xref  ref-type="bibr" rid="r15">15</xref>], [<xref  ref-type="bibr" rid="r20">20</xref>], [<xref  ref-type="bibr" rid="r21">21</xref>].</p>
                    <p>From the comparison of the inspection methods shown in <xref  ref-type="table" rid="t1">Table 1</xref>, it can be said, in certain aspects, that the heuristic evaluation takes some advantages over the other methods, mainly because of the ease of carrying it out, which is not accomplished in other methods. According to the methods: actions analysis and standard inspection, highest level experts are required, and for the routes (cognitive and pluralistic) there must be considered task definition methodologies and certain training, characteristics that increase the complexity of carrying them out. However, the inspection potential methods are not in doubt, because, if the time and necessary conditions to carry them out are available, good results can be obtained [<xref  ref-type="bibr" rid="r23">23</xref>]. From the comparison, it can be seen that factors such as time, equipment, and experts’ level, heuristic evaluation is still the simplest method to perform. However, the cognitive journey characteristics are very similar to the heuristic evaluation ones, except for the experts’ experience level. Considering the aforementioned factor, the methods: inspection of standards and analysis of actions, become the most complex ones to perform, but other factors that benefit them from the other methods, such as the data type they obtain, and the evaluators need to carry it out.</p>
                    <p><xref  ref-type="table" rid="t1">Table 1</xref> convention: Heuristic Evaluation: 1, Cognitive Walkthrough: 2, Action Analysis: 3, Pluralist path: 4, Formal Inspection: 5, Standard Inspection: 6.</p>
                    <p>
                        <table-wrap id="t1">
                            <label>Table 1</label>
                            <caption>
                                <title>Comparative summary among inspection methods [<xref  ref-type="bibr" rid="r25">25</xref>]</title>
                            </caption>
                            <graphic xlink:href="art5545_t1.PNG" />
                        </table-wrap>
                    </p>
                    <p>Thus, it is possible to establish certain comparisons among the test methods, shown in <xref  ref-type="table" rid="t2">Table 2</xref>. The interrogation methods (questionnaires and interviews) are the simplest test methods to perform. Its characteristics allow, with few economic resources and with a preparation that does not take too much time, to obtain satisfactory results regarding the system’s usability evaluation. The interrogation methods are aimed to obtain subjective information from the system under evaluation, obtaining, in many cases, information that cannot be collected through other evaluation methods. </p>
                    <p><xref  ref-type="table" rid="t2">Table 2</xref> convention: Focus Group: 1, Thinking aloud: 2, Constructive Interaction: 3, Questionnaires: 4, Interviews: 5, Surveys: 6, Formative Experiments: 7, Recording of use: 8, Performances Measurement: 9, Driver's method: 10, Test retrospective: 11.</p>
                    <p>
                        <table-wrap id="t2">
                            <label>Table 2</label>
                            <caption>
                                <title>Comparative summary between the Test methods [<xref  ref-type="bibr" rid="r25">25</xref>]</title>
                            </caption>
                            <graphic xlink:href="art5545_t2_1.PNG" />
                            <graphic xlink:href="art5545_t2_2.PNG" />
                        </table-wrap>
                    </p>
                    <p>And finally, it is also possible to establish certain comparisons among the inquiry methods, shown in <xref  ref-type="table" rid="t3">Table 3</xref>. We can say then that contextual inquiry offers a deep understanding of the user's work, but on the contrary, it can only be used in the early stages of development but it generates a lot of information that makes it difficult to assimilate and analyze it, too. The participatory inquiry has the advantages that it does not waste users' time, and that it can be carried out remotely and does not need experts to use it, although it can be time-consuming when dealing with complex system tasks.</p>
                    <p>
                        <table-wrap id="t3">
                            <label>Table 3</label>
                            <caption>
                                <title>Comparative summary among the inquiry methods [<xref  ref-type="bibr" rid="r25">25</xref>]</title>
                            </caption>
                            <graphic xlink:href="art5545_t3.PNG" />
                        </table-wrap>
                    </p>
                </sec>
                <sec>
                    <title>4.1.2.	UAM study object selection</title>
                    <p>UAMs evaluate specific usability aspects to obtain the best processes for evaluating usability [<xref  ref-type="bibr" rid="r25">25</xref>].  Different factors influence this, such as time, simplicity, the type of results, phases within the development cycle, economic resources, users, and experts’ number, among others. </p>
                    <p>Due to the large number of UAMs, it is necessary to select a smaller set of them to be the study object in this research [<xref  ref-type="bibr" rid="r3">3</xref>]. Hence, it has been taken as a reference due to the following criteria: training need-closeness to Software Engineering-user's presence-applicability-contribution vs effort and representativeness. Also, giving them the following values: a little useful, useful, and very useful. Thus, UAMs classification parameters were considered and determined as it appears in the information tables 1, 2, and 3.</p>
                    <sec>
                        <title>Inspection of selection methods</title>
                        <p>The selected inspection methods are:</p>
                        <p>•	Heuristic evaluation</p>
                        <p>•	Cognitive walkthrough</p>
                        <p>The UAM: pluralistic path, standards inspection, and action analysis are not considered in this project. </p>
                        <p>As it is shown in <xref  ref-type="table" rid="t1">Table 1</xref>, the pluralistic path is not considered by its impractical simulation, as well as the fact that it makes it difficult to analyze due to the number of participants per session. The standard inspection is not considered either due to the wide standard of knowledge in terms of the high level of training and the lack of considering actions to be evaluated. The action analysis was not selected, mainly, because it requires a higher-level expert, which is expensive for most organizations to achieve. The formal inspection was also discarded by the need for highly experienced evaluators and a numerous team, which makes the implementation cost higher.</p>
                    </sec>
                    <sec>
                        <title>Test methods selection</title>
                        <p>Below, the instruments of the selected inspection methods are mentioned:</p>
                        <p>•	Formal experiments</p>
                        <p>•	Questionnaires and interviews.</p>
                        <p>•	Constructive interaction</p>
                        <p>•	Driver's method</p>
                        <p>The UAM: thinking aloud, use recording, performance measure, and retrospective tests, are not considered in this research. Based on the information in Table 3, thinking aloud is not considered because it interferes with the normal user’s behavior, which influences the interaction with the system. The use of the recording is not considered because its performance requires a high training level by the evaluators, the effort to establish the equipment is high, and, in addition, it is especially indicated only to analyze websites (level of low applicability). The performance measurement was not selected because it does not ensure the usability target of the obtained measure. Subjective information was neither used such as opinions, attitudes, satisfaction, and the environment used is not natural for users, so it can distort the users’ performance. The retrospective test was not selected because it takes at least twice as much time as any other method.</p>
                        <p><bold>Inquiry methods selection</bold></p>
                        <p>The inquiry methods will not be considered for the VLE usability evaluation methods combination because they were discarded in the related works [<xref  ref-type="bibr" rid="r24">24</xref>], [<xref  ref-type="bibr" rid="r16">16</xref>], [<xref  ref-type="bibr" rid="r19">19</xref>], [<xref  ref-type="bibr" rid="r15">15</xref>], [<xref  ref-type="bibr" rid="r20">20</xref>], [<xref  ref-type="bibr" rid="r21">21</xref>].</p>
                        <p>It should be highlighted that the UAMs initial proposed selection is here linked to fundamental usability aspects and to different variables that must be considered in the development. The chosen methods could be applied correctly, effectively, and simply. However, these methods do not take into account the characteristics of a VLE, and for this reason, they do not evaluate the usability of these features and are mainly focused on a general application. Thus, it is expected that the usability evaluation in VLEs is carried out subsequently under the selected usability metrics that will yield the most appropriate UAMs to evaluate the usability in VLEs, due to the fact that those evaluations consider VLEs usability characteristic-aspects.</p>
                    </sec>
                </sec>
            </sec>
            <sec>
                <title>4.2.	Methodological cycle</title>
                <p>The objective of this cycle is to design an evaluation guide based on the formal combination methods for usability evaluation in virtual learning environments. To do that, the following activities were carried out: a literature review to choose a Metrics set that allows to perform the execution of the chosen UAMs in the previous cycle and to select those that can be applied in the VLE, in the same way, the VLE study object selection is made, followed by the UAM execution in order to choose the methods that make up the combination of the methods for the usability evaluation in VLE, and finally, as a subsequent activity, the guide that will be validated in the evaluation cycle will be selected.</p>
                <p>For the result analysis, it is necessary to define a metrics set that allows measuring the obtained results from the UAM study target execution objectively. For this, after an observation process and a literature review [<xref  ref-type="bibr" rid="r26">26</xref>], a metrics series was obtained from the different evaluation and execution methods, which were grouped into the following characteristics:</p>
                <p>Feature N° 1: Usability problem detections</p>
                <p>•	Total number of identified problems</p>
                <p>•	Critical / severe number of problems</p>
                <p>•	Frequent number of problems </p>
                <p>•	NOT critical number of problems</p>
                <p>•	Problems per functionality number</p>
                <p>Feature N° 2: Human resource</p>
                <p>•	Experts/evaluators number</p>
                <p>•	Users quantity</p>
                <p>•	Involved number</p>
                <p>•	Experts/ evaluators experience (in years)</p>
                <p>Feature N° 3: Equipment</p>
                <p>•	Required amount of software tools / technologies </p>
                <p>•	Required amount of hardware devices </p>
                <p>•	Required amount of materials</p>
                <p>Feature N° 4: Time</p>
                <p>•	Time used to complete a task</p>
                <p>•	Invested time to recover from errors</p>
                <p>•	Time used to complete the method</p>
                <p>•	Time used to complete the planning stage</p>
                <p>•	Time used to complete the execution stage </p>
                <p>•	Time used to complete the analysis of the results</p>
                <p>Feature N° 5: Task</p>
                <p>•	Number of proposed tasks</p>
                <p>•	Number of completed tasks</p>
                <p>•	Completed tasks per user’s profile number</p>
                <p>•	Completed tasks percentage</p>
                <p>After defining the preliminary metrics set, a survey was drawn up in order to identify; according to the experience and the expert's group knowledge, the most relevant metrics that would allow choosing the UAM for VLE and which allows to carry out the result analysis. The survey was developed using the SUS (System Usability Scale) system [<xref  ref-type="bibr" rid="r27">27</xref>] so, each question has 5 response options. A consensus was made by 10 experts in usability evaluations of interactive systems (who perform at least 3 evaluations per year).</p>
                <sec>
                    <title>4.2.1.	Metrics selection</title>
                    <p>Once the survey's results were collected and processed (including averages and standard deviation), the most relevant metrics were identified according to their high averages. These metrics are those that were rated as "important" and "very important" based on the participants’ experience who completed the surveys. The identified metrics that correspond to the characteristics are usability problem detection, human resources, and time. However, when making the analysis among the UAMs, the generated metrics by the methods should be considered, so, the human resource metrics will not be considered as criteria to discriminate between the UAMs study object. The reason is that these metrics are not related to the evaluation method itself, but to a test session where it is used, which is different. For example, the number of people involved in the method execution (Involved quantity metric) should not be a criterion to compare among several UAMs because it would attribute to the method a metrics value that is not generated by the method itself (but by the method requirements or work requirements thereof). The same happens with the experts’/evaluators’ experience metrics (in years) since the evaluators’ experience participating in a method does not reveal anything about it.</p>
                    <p>The selected metrics correspond to base (or direct) measures according to the measurement theory. This indicates that they do not depend on any other measure and that their measurement form is a measurement method [<xref  ref-type="bibr" rid="r28">28</xref>]. On the other hand, the metrics that belong to the usability problem detections feature are associated with an absolute scale type [<xref  ref-type="bibr" rid="r29">29</xref>] because there is only one possible way to measure: counting; while the time feature metrics are associated with a ratio scale type [<xref  ref-type="bibr" rid="r30">30</xref>], which has a fixed reference point: zero (no value can be less than zero).</p>
                    <p>Now, once the measurement process is done, the metrics values are not between 0 and 1 (they exceed 1), so a standardization table must be used to take them to a value scale between 0 and 1. After normalizing the values, the metrics generate a real number that is in a range between 0 and 1. Thus, the metrics provide positive evidence if the values are close to 1. In the case of the metrics related to time, in which "good" values are those that approach zero, it would be necessary to perform a calculation like this: Vc = 1 - V. So, when the value of the metric (V) is closer to zero, the complementary value (Vc) will be closer to 1, so the metrics can be taken to positive values (or increasing). Regarding the metrics corresponding to the time characteristic, a base time has not been established to carry out the planning, execution, and results analysis stages. The reason is that the time may vary according to the number of evaluators and users participating in the evaluation process; the same applies to the execution speed of the UAM stages (planning, execution, and analysis of the results).</p>
                    <p>Here are the metrics that will be considered:</p>
                    <p>•	Number of identified problems </p>
                    <p>•	Number of critical problems</p>
                    <p>•	Number of frequent problems</p>
                    <p>•	Time used to complete the planning stage</p>
                    <p>•	Time used to complete the execution stage</p>
                    <p>•	Time used to complete the analysis of the results</p>
                    <p>As previously mentioned, the metrics that belongs to the human resource feature will not be considered to discriminate among the executed UAMs. On the other hand, regarding the Implicated Quantity metrics, it provides positive evidence when the method involves a user’s number greater than or equal to the established one, several (minimum 3) evaluators, and at least one organization representative. Finally, regarding the experts’/evaluators’ metrics experience (in years). This provides positive evidence the higher it is because it directly influences the quantity and quality of the obtained results in the evaluation of execution methods (inspection and test).</p>
                    <p>According to the above mentioned, the idea of having the final metrics set is to be able to apply these metrics, when executing the UAMs (selected in the conceptual cycle) in a VLE study object and from the values obtained to choose finally the suitable UAMs for the VLE to define the final guide.</p>
                    <p>This is the current stage of the research. In the project execution planning, some activities have been defined that will allow the construction of the final guide for the usability evaluation in virtual learning environments. The activities defined for this cycle have not been developed yet, but they are mentioned below:</p>
                    <p>•	To identify the Virtual Learning Environments study object</p>
                    <p>•	To execute the usability evaluation methods on the VLE study object with the selected metrics application</p>
                    <p>•	To process the obtained results to identify the appropriate UAMs to make up the combination of the methods for the usability evaluation in Virtual Learning Environments</p>
                    <p>•	To prepare a guide for usability evaluation in VLE based on the combination made in the previous step</p>
                </sec>
            </sec>
            <sec>
                <title>4.3.	Evaluation cycle</title>
                <p>In the evaluation cycle, the case study will be designed and executed to validate the proposed guide, followed by the obtained result analysis, and the redefinition of the guide considering the obtained results.</p>
            </sec>
        </sec>
        <sec>
            <title>5.	Conclusions</title>
            <p>After the literature analysis, it was found that there is no standardization regarding settings of usability evaluation in VLEs. Nowadays, methods have been used in isolation, and with specific criteria to evaluate a product, methods that are not designed to evaluate VLEs usability. Therefore, the guide defined in this proposal intends to improve this situation by containing a combination of evaluation methods to be applied in virtual learning environments, due to the current boom of these software types of systems and the need to think of a satisfied end-user.</p>
            <p>The selected methods to form the initial combination have been chosen because they are the fittest to evaluate the usability specifically in VLE. This selection has had several evaluation criteria, as previously shown, from which it was possible to select, among the inspection methods: Heuristic Evaluation and Cognitive walkthrough, and among the Test methods: Formal experiments, Questionnaires and interviews, Constructive interaction and Driver's method.</p>
            <p>The types of inspection and test methods selected will be applied at later stages of the investigation in a VLE and will be executed together with the previously defined and selected metrics: number of identified problems, number of critical problems, number of frequent problems, time used to complete the planning stage, time used to complete the execution stage, time used to complete the analysis of the results. This in order to make another filter to the methods and leave those that really contribute in the evaluation of a VLE and as a result, it is expected to obtain a final combination that will be the basic guide to apply in a VLE, generating the information of usability required for these contexts.</p>
            <p>Based on the obtained results in this research, we consider that the UAM selection strategy for VLE is a possible way to choose them, without leaving aside other strategies that can contribute with a selection that may fit in a better way to the VLE needs.</p>
        </sec>
	</body>
	<back>
		<ref-list>
			<title>Referencias</title>
			<ref id="r1">
				<mixed-citation>E. Mor, M. Garreta and M. Galofré, Diseño Centrado en el Usuario en Entornos Virtuales de Aprendizaje, de la Usabilidad a la Experiencia del Estudiante, 2007</mixed-citation>
			    <element-citation  publication-type="book">
				    <person-group person-group-type="author">
				    	<name>
				    		<surname>Mor</surname>
				    		<given-names>E.</given-names>
				    	</name>
                        <name>
				    		<surname>Garreta</surname>
				    		<given-names>M.</given-names>
				    	</name>
                        <name>
				    		<surname>Galofré</surname>
				    		<given-names>M.</given-names>
				    	</name>
				    </person-group>
				    <year>2007</year>
                    <source>Diseño Centrado en el Usuario en Entornos Virtuales de Aprendizaje, de la Usabilidad a la Experiencia del Estudiante</source>
			    </element-citation>
            </ref>
            <ref id="r2">
				<mixed-citation>H. Sharp, Y. Rogers and J. Preece, Interaction Design Beyond Human - Computer Interaction, 2 ed., Wiley, John &amp; Sons, Incorporated, 2007</mixed-citation>
			    <element-citation  publication-type="book">
				    <person-group person-group-type="author">
				    	<name>
				    		<surname>Sharp</surname>
				    		<given-names>H.</given-names>
				    	</name>
                        <name>
				    		<surname>Rogers</surname>
				    		<given-names>Y.</given-names>
				    	</name>
                        <name>
				    		<surname>Preece</surname>
				    		<given-names>J.</given-names>
				    	</name>
				    </person-group>
				    <year>2007</year>
                    <source>Interaction Design Beyond Human - Computer Interaction, 2 ed</source>
                    <publisher-name>Wiley, John &amp; Sons, Incorporated</publisher-name>
			    </element-citation>
            </ref>
            <ref id="r3">
				<mixed-citation>X. Ferré, Marco de Integración de la Usabilidad en el proceso de Desarrollo de Software, Madrid, 2005</mixed-citation>
			    <element-citation  publication-type="book">
				    <person-group person-group-type="author">
				    	<name>
				    		<surname>Ferré</surname>
				    		<given-names>X.</given-names>
				    	</name>
				    </person-group>
				    <year>2005</year>
                    <source>Marco de Integración de la Usabilidad en el proceso de Desarrollo de Software</source>
                    <publisher-loc>Madrid</publisher-loc>
			    </element-citation>
            </ref>
            <ref id="r4">
				<mixed-citation>W. Sanchez, "La usabilidad en ingeniería de Software: definición y caracteristicas," Ing-novación , 2011</mixed-citation>
			    <element-citation  publication-type="book">
				    <person-group person-group-type="author">
				    	<name>
				    		<surname>Sanchez</surname>
				    		<given-names>W.</given-names>
				    	</name>
				    </person-group>
				    <year>2011</year>
                    <source>La usabilidad en ingeniería de Software: definición y caracteristicas</source>
                    <publisher-name>Ing-novación</publisher-name>
			    </element-citation>
            </ref>
            <ref id="r5">
				<mixed-citation>J. Nielsen, "The usability engineering lifecycle," Computer, vol. 25, no. 3, pp. 12-22, 1992</mixed-citation>
			    <element-citation  publication-type="journal">
				    <person-group person-group-type="author">
				    	<name>
				    		<surname>Nielsen</surname>
				    		<given-names>J.</given-names>
				    	</name>
				    </person-group>
				    <year>1992</year>
                    <article-title>The usability engineering lifecycle</article-title>
                    <source>Computer</source>
                    <volume>25</volume>
                    <issue>3</issue>
                    <fpage>12</fpage>
                    <lpage>22</lpage>
			    </element-citation>
            </ref>
            <ref id="r6">
				<mixed-citation>A. Dix, J. E. Finlay, G. D. Abowd and R. Beale, Human-computer interaction: Prentice hall, Inc, England, 2004</mixed-citation>
			    <element-citation  publication-type="book">
				    <person-group person-group-type="author">
				    	<name>
				    		<surname>Dix</surname>
				    		<given-names>A.</given-names>
				    	</name>
                        <name>
				    		<surname>Finlay</surname>
				    		<given-names>J. E.</given-names>
				    	</name>
                        <name>
				    		<surname>Abowd</surname>
				    		<given-names>G. D.</given-names>
				    	</name>
                        <name>
				    		<surname>Beale</surname>
				    		<given-names>R.</given-names>
				    	</name>
				    </person-group>
				    <year>2004</year>
                    <source>Human-computer interaction: Prentice hall</source>
                    <publisher-name>Inc</publisher-name>
                    <publisher-loc>England</publisher-loc>
			    </element-citation>
            </ref>
            <ref id="r7">
				<mixed-citation>T. Granollers, MPIu+a una metodología que integra la ingeniería del software, la interacción persona-ordenador y la accesibilidad en el contexto de equipos de desarrollo multidisciplinares, LLeida, 2004</mixed-citation>
			    <element-citation  publication-type="book">
				    <person-group person-group-type="author">
				    	<name>
				    		<surname>Granollers</surname>
				    		<given-names>T.</given-names>
				    	</name>
				    </person-group>
				    <year>2004</year>
                    <source>MPIu+a una metodología que integra la ingeniería del software, la interacción persona-ordenador y la accesibilidad en el contexto de equipos de desarrollo multidisciplinares</source>
                    <publisher-name>LLeida</publisher-name>
			    </element-citation>
            </ref>
            <ref id="r8">
				<mixed-citation>A. Solano, Propuesta metodológica para la evaluación colaborativa de la usabilidad de aplicaciones de Televisión Digital Interactiva, Popayán, Cauca, 2012</mixed-citation>
			    <element-citation  publication-type="book">
				    <person-group person-group-type="author">
				    	<name>
				    		<surname>Solano</surname>
				    		<given-names>A.</given-names>
				    	</name>
				    </person-group>
				    <year>2012</year>
                    <source>Propuesta metodológica para la evaluación colaborativa de la usabilidad de aplicaciones de Televisión Digital Interactiva</source>
                    <publisher-loc>Popayán, Cauca</publisher-loc>
			    </element-citation>
            </ref>
            <ref id="r9">
				<mixed-citation>M. E. Alva Obeso, Metodología de Medición y Evaluación de la Usabilidad en Sitios Web Educativos, Oviedo, 2005</mixed-citation>
			    <element-citation  publication-type="book">
				    <person-group person-group-type="author">
				    	<name>
				    		<surname>Alva Obeso</surname>
				    		<given-names>M. E.</given-names>
				    	</name>
				    </person-group>
				    <year>2005</year>
                    <source>Metodología de Medición y Evaluación de la Usabilidad en Sitios Web Educativos</source>
                    <publisher-name>Oviedo</publisher-name>
			    </element-citation>
            </ref>
            <ref id="r10">
				<mixed-citation>N. Bevan and M. Macleod, "Usability measurement in context," Behaviour &amp; information technology, vol. 13, no. 1, pp. 132-145, 1994</mixed-citation>
			    <element-citation  publication-type="journal">
				    <person-group person-group-type="author">
				    	<name>
				    		<surname>Bevan</surname>
				    		<given-names>N.</given-names>
				    	</name>
                        <name>
				    		<surname>Macleod</surname>
				    		<given-names>M.</given-names>
				    	</name>
				    </person-group>
				    <year>1994</year>
                    <article-title>Usability measurement in context</article-title>
                    <source>Behaviour &amp; information technology</source>
                    <volume>13</volume>
                    <issue>1</issue>
                    <fpage>132</fpage>
                    <lpage>145</lpage>
			    </element-citation>
            </ref>
            <ref id="r11">
				<mixed-citation>S. Thüer and A. Ferreira Szpiniak, "Entornos Virtuales de Aprendizaje: Diseño de experiencias de usuario para la web 2.0," in Conferencia Internacional ICDE – UNQ 2011 “Educación a distancia, TIC y universidad: calidad, equidad y acceso a la educación superior, Buenos Aires,Argentina, 2011</mixed-citation>
			    <element-citation  publication-type="book">
				    <person-group person-group-type="author">
				    	<name>
				    		<surname>Thüer</surname>
				    		<given-names>S.</given-names>
				    	</name>
                        <name>
				    		<surname>Ferreira Szpiniak</surname>
				    		<given-names>A.</given-names>
				    	</name>
				    </person-group>
				    <year>2011</year>
                    <source>"Entornos Virtuales de Aprendizaje: Diseño de experiencias de usuario para la web 2.0," in Conferencia Internacional ICDE – UNQ 2011 “Educación a distancia, TIC y universidad: calidad, equidad y acceso a la educación superior</source>
                    <publisher-loc>Buenos Aires,Argentina</publisher-loc>
			    </element-citation>
            </ref>
            <ref id="r12">
				<mixed-citation>R. Cicala, M. Perazzo, F. Bordignon and C. J. De Salvo, Investigación sobre entornos virtuales de aprendizaje utilizados para la enseñanza en profesorados y universidades nacionales, Buenos Aires: Unipe, 2011, p. 96</mixed-citation>
			    <element-citation  publication-type="book">
				    <person-group person-group-type="author">
				    	<name>
				    		<surname>Cicala</surname>
				    		<given-names>R.</given-names>
				    	</name>
                        <name>
				    		<surname>Perazzo</surname>
				    		<given-names>M.</given-names>
				    	</name>
                        <name>
				    		<surname>Bordignon</surname>
				    		<given-names>F.</given-names>
				    	</name>
                        <name>
				    		<surname>De Salvo</surname>
				    		<given-names>C. J.</given-names>
				    	</name>
				    </person-group>
				    <year>2011</year>
                    <source>Investigación sobre entornos virtuales de aprendizaje utilizados para la enseñanza en profesorados y universidades nacionales</source>
                    <fpage>96</fpage>
                    <publisher-name>Unipe</publisher-name>
                    <publisher-loc>Buenos Aires,Argentina</publisher-loc>
			    </element-citation>
            </ref>
            <ref id="r13">
				<mixed-citation>ISO, International Software Quality Standard, ISO/IEC 25010, Systems and software engineering - Systems and software Quality Requirements and Evaluation (SQuaRE) - Systems and software quality models, ed,, 2011</mixed-citation>
			    <element-citation  publication-type="book">
				    <person-group person-group-type="author">
				    	<collab>ISO, International Software Quality Standard</collab>
				    </person-group>
				    <year>2011</year>
                    <source>ISO/IEC 25010, Systems and software engineering - Systems and software Quality Requirements and Evaluation (SQuaRE) - Systems and software quality models, ed</source>
			    </element-citation>
            </ref>
            <ref id="r14">
				<mixed-citation>J. Offutt, "Quality Attributes of Web Software Applications," IEEE Software: Special Issue on Software Engineering of Internet Software, p. 25‐32, 2002</mixed-citation>
			    <element-citation  publication-type="book">
				    <person-group person-group-type="author">
				    	<name>
                            <surname>Offutt</surname>
                            <given-names>J.</given-names>
                        </name>
				    </person-group>
				    <year>2002</year>
                    <source>"Quality Attributes of Web Software Applications," IEEE Software: Special Issue on Software Engineering of Internet Software</source>
                    <fpage>25</fpage>
                    <lpage>32</lpage>
			    </element-citation>
            </ref>
            <ref id="r15">
				<mixed-citation>J. Reyes Vera, M. I. Berdugo and L. Machuca Villegas, "Evaluación de usablidad de un sistema de administración de cursos basado en la plataforma LingWeb," Ingenieare, vol. 24, no. 3, pp. 435-444, 2016</mixed-citation>
			    <element-citation  publication-type="journal">
				    <person-group person-group-type="author">
				    	<name>
                            <surname>Reyes Vera</surname>
                            <given-names>J.</given-names>
                        </name>
                        <name>
                            <surname>Berdugo</surname>
                            <given-names>M. I.</given-names>
                        </name>
                        <name>
                            <surname>Machuca Villegas</surname>
                            <given-names>L.</given-names>
                        </name>
				    </person-group>
				    <year>2016</year>
                    <article-title>Evaluación de usablidad de un sistema de administración de cursos basado en la plataforma LingWeb</article-title>
                    <source>Ingenieare</source>
                    <volume>24</volume>
                    <issue>3</issue>
                    <fpage>435</fpage>
                    <lpage>444</lpage>
			    </element-citation>
            </ref>
            <ref id="r16">
				<mixed-citation>R. Otaiza, "Metodología de evaluación de usabilidad para aplicaciones web transaccionales," Tesis de Maestría, Escuela de Ingeniería Informática, Pontificia Universidad Católica de Valparaíso, Valparaíso, Chile, 2008</mixed-citation>
			    <element-citation  publication-type="book">
				    <person-group person-group-type="author">
				    	<name>
                            <surname>Otaiza</surname>
                            <given-names>R.</given-names>
                        </name>
				    </person-group>
				    <year>2008</year>
                    <source>"Metodología de evaluación de usabilidad para aplicaciones web transaccionales," Tesis de Maestría</source>
                    <publisher-name>Escuela de Ingeniería Informática, Pontificia Universidad Católica de Valparaíso</publisher-name>
                    <publisher-loc>Valparaíso, Chile</publisher-loc>
			    </element-citation>
            </ref>
            <ref id="r17">
				<mixed-citation>J. D. Pinto, V. Agredo-Delgado and C. Collazos, "Construyendo una guía para la evaluación de la usabilidad en EVAs," Campus Virtuales, vol. 7, no. 2, pp. 93-104, 2018</mixed-citation>
			    <element-citation  publication-type="journal">
				    <person-group person-group-type="author">
				    	<name>
                            <surname>Pinto</surname>
                            <given-names>J. D.</given-names>
                        </name>
                        <name>
                            <surname>Agredo-Delgado</surname>
                            <given-names>V.</given-names>
                        </name>
                        <name>
                            <surname>Collazos</surname>
                            <given-names>C.</given-names>
                        </name>
				    </person-group>
				    <year>2018</year>
                    <article-title>Construyendo una guía para la evaluación de la usabilidad en EVAs</article-title>
                    <source>Campus Virtuales</source>
                    <volume>7</volume>
                    <issue>2</issue>
                    <fpage>93</fpage>
                    <lpage>104</lpage>
			    </element-citation>
            </ref>
            <ref id="r18">
				<mixed-citation>R. Otaiza, C. Rusu and S. Roncagliolo, Evaluating the usability of transactional Web Sites, Saint Maarten: Third International Conference on Advances in ComputerHuman Interactions (ACHI'10), 2010</mixed-citation>
			    <element-citation  publication-type="book">
				    <person-group person-group-type="author">
				    	<name>
                            <surname>Otaiza</surname>
                            <given-names>R.</given-names>
                        </name>
                        <name>
                            <surname>Rusu</surname>
                            <given-names>C.</given-names>
                        </name>
                        <name>
                            <surname>Roncagliolo</surname>
                            <given-names>S.</given-names>
                        </name>
				    </person-group>
				    <year>2010</year>
                    <source>Evaluating the usability of transactional Web Sites, Saint Maarten: Third International Conference on Advances in ComputerHuman Interactions (ACHI'10)</source>
			    </element-citation>
            </ref>
            <ref id="r19">
				<mixed-citation>D. Plantak Vukovac, V. Kirinic and B. Klicek, "A comparision of usability evaluation methods for E- learning systems," in DAAAM international scientific book, 2010, pp. 271-288</mixed-citation>
			    <element-citation  publication-type="book">
				    <person-group person-group-type="author">
				    	<name>
                            <surname>Plantak Vukovac</surname>
                            <given-names>D.</given-names>
                        </name>
                        <name>
                            <surname>Kirinic</surname>
                            <given-names>V.</given-names>
                        </name>
                        <name>
                            <surname>Klicek</surname>
                            <given-names>B.</given-names>
                        </name>
				    </person-group>
				    <year>2010</year>
                    <chapter-title>A comparision of usability evaluation methods for E- learning systems</chapter-title>
                    <source>DAAAM international scientific book</source>
                    <fpage>271</fpage>
                    <lpage>288</lpage>
			    </element-citation>
            </ref>
            <ref id="r20">
				<mixed-citation>A. Ferreira Szpiniak and C. V. Sanz, Diseño de un modelo de evaluación de entornos virtuales de enseñanza y aprendizaje basado en la usabilidad, La plata, 2013</mixed-citation>
			    <element-citation  publication-type="book">
				    <person-group person-group-type="author">
				    	<name>
                            <surname>Ferreira Szpiniak</surname>
                            <given-names>A.</given-names>
                        </name>
                        <name>
                            <surname>Sanz</surname>
                            <given-names>C. V.</given-names>
                        </name>
				    </person-group>
				    <year>2013</year>
                    <source>Diseño de un modelo de evaluación de entornos virtuales de enseñanza y aprendizaje basado en la usabilidad</source>
                    <publisher-loc>La Plata</publisher-loc>
			    </element-citation>
            </ref>
            <ref id="r21">
				<mixed-citation>M. P. Ponce Martinez, "Usabilidad en un sistema E-Learning," in 7 - th Europe conference E-COMM-LINE, Bucharest, 2013</mixed-citation>
			    <element-citation  publication-type="book">
				    <person-group person-group-type="author">
				    	<name>
                            <surname>Ponce Martinez</surname>
                            <given-names>M. P.</given-names>
                        </name>
				    </person-group>
				    <year>2013</year>
                    <article-title>Usabilidad en un sistema E-Learning</article-title>
                    <conf-name>7th Europe conference E-COMM-LINE</conf-name>
                    <publisher-loc>Bucharest</publisher-loc>
			    </element-citation>
            </ref>
            <ref id="r22">
				<mixed-citation>F. J. Pino, M. Piattini and G. Horta Travassos, "Managing and developing distributed research projects in software engineering by means of actionresearch," Revista Facultad de Ingeniería Universidad de Antioquia, pp. 61-74, 2013</mixed-citation>
			    <element-citation  publication-type="journal">
				    <person-group person-group-type="author">
				    	<name>
                            <surname>Pino</surname>
                            <given-names>F. J.</given-names>
                        </name>
                        <name>
                            <surname>Piattini</surname>
                            <given-names>M.</given-names>
                        </name>
                        <name>
                            <surname>Horta Travassos</surname>
                            <given-names>G.</given-names>
                        </name>
				    </person-group>
				    <year>2013</year>
                    <article-title>Managing and developing distributed research projects in software engineering by means of actionresearch</article-title>
                    <source>Revista Facultad de Ingeniería Universidad de Antioquia</source>
                    <fpage>61</fpage>
                    <lpage>74</lpage>
			    </element-citation>
            </ref>
            <ref id="r23">
				<mixed-citation>J. Muñoz, Y. Hernández, V. Bustos, A. Aranda, M. Calderon, C. Collazos, Y. Mendez, A. Solano, J. Guzman, F. Alvarez, R. Mendoza, J. Guerrero, J. M. Gonzalez, L. Rodriguez, T. Granollers, R. Gil and D. Cespedes, Temas de diseño en interacción humano computadora, Guayaquil: Proyecto LATin-Iniciativa Latinoamericana de Libros de Texto Abierto, 2014</mixed-citation>
			    <element-citation  publication-type="book">
				    <person-group person-group-type="author">
				    	<name>
                            <surname>Muñoz</surname>
                            <given-names>J.</given-names>
                        </name>
                        <name>
                            <surname>Hernández</surname>
                            <given-names>Y.</given-names>
                        </name>
                        <name>
                            <surname>Bustos</surname>
                            <given-names>V.</given-names>
                        </name>
                        <name>
                            <surname>Aranda</surname>
                            <given-names>A.</given-names>
                        </name>
                        <name>
                            <surname>Calderon</surname>
                            <given-names>M.</given-names>
                        </name>
                        <name>
                            <surname>Collazos</surname>
                            <given-names>C.</given-names>
                        </name>
                        <name>
                            <surname>Mendez</surname>
                            <given-names>Y.</given-names>
                        </name>
                        <name>
                            <surname>Solano</surname>
                            <given-names>A.</given-names>
                        </name>
                        <name>
                            <surname>Guzman</surname>
                            <given-names>J.</given-names>
                        </name>
                        <name>
                            <surname>Alvarez</surname>
                            <given-names>F.</given-names>
                        </name>
                        <name>
                            <surname>Mendoza</surname>
                            <given-names>R.</given-names>
                        </name>
                        <name>
                            <surname>Guerrero</surname>
                            <given-names>J.</given-names>
                        </name>
                        <name>
                            <surname>Gonzalez</surname>
                            <given-names>J. M.</given-names>
                        </name>
                        <name>
                            <surname>Rodriguez</surname>
                            <given-names>L.</given-names>
                        </name>
                        <name>
                            <surname>Granollers</surname>
                            <given-names>T.</given-names>
                        </name>
                        <name>
                            <surname>Gil</surname>
                            <given-names>R.</given-names>
                        </name>
                        <name>
                            <surname>Cespedes</surname>
                            <given-names>D.</given-names>
                        </name>
				    </person-group>
				    <year>2014</year>
                    <source>Temas de diseño en interacción humano computadora</source>
                    <publisher-name>Proyecto LATin-Iniciativa Latinoamericana de Libros de Texto Abierto</publisher-name>
                    <publisher-loc>Guayaquil</publisher-loc>
			    </element-citation>
            </ref>
            <ref id="r24">
				<mixed-citation>G. Cockton, A. Woolrych and D. Lavery, "Inspection-based evaluations," in The Human-Computer Interaction Handbook, 2nd ed., Lawrence Erlbaum Associates, 2008, pp. 1171-1190</mixed-citation>
			    <element-citation  publication-type="book">
				    <person-group person-group-type="author">
				    	<name>
                            <surname>Cockton</surname>
                            <given-names>G.</given-names>
                        </name>
                        <name>
                            <surname>Woolrych</surname>
                            <given-names>A.</given-names>
                        </name>
                        <name>
                            <surname>Lavery</surname>
                            <given-names>D.</given-names>
                        </name>
				    </person-group>
				    <year>2008</year>
                    <chapter-title>Inspection-based evaluations</chapter-title>
                    <source>The Human-Computer Interaction Handbook</source>
                    <edition>2nd ed.</edition>
                    <publisher-name>Lawrence Erlbaum Associates</publisher-name>
                    <fpage>1171</fpage>
                    <lpage>1190</lpage>
			    </element-citation>
            </ref>
            <ref id="r25">
				<mixed-citation>M. F. Lopez, "Métodos de evaluación de Usabilidad para aplicaciones web transaccionales," Informe final de proyecto para optar por el titulo profesional de ingeniero civil en informática, Valparaiso, 2012</mixed-citation>
			    <element-citation  publication-type="book">
				    <person-group person-group-type="author">
				    	<name>
                            <surname>Lopez</surname>
                            <given-names>M. F.</given-names>
                        </name>
				    </person-group>
				    <year>2012</year>
                    <source>"Métodos de evaluación de Usabilidad para aplicaciones web transaccionales," Informe final de proyecto para optar por el titulo profesional de ingeniero civil en informática</source>
                    <publisher-loc>Valparaíso</publisher-loc>
			    </element-citation>
            </ref>
            <ref id="r26">
				<mixed-citation>ISO, International Standard ISO/IEC 9241, Ergonomic requirements for office work with visual display terminals, 1998</mixed-citation>
			    <element-citation  publication-type="book">
				    <person-group person-group-type="author">
				    	<collab>ISO, International Standard</collab>
				    </person-group>
				    <year>1998</year>
                    <source>ISO/IEC 9241, Ergonomic requirements for office work with visual display terminals</source>
			    </element-citation>
            </ref>
            <ref id="r27">
				<mixed-citation>W. Albert and T. Tullis, Measuring the user experience: collecting, analyzing, and presenting usability metrics, Newnes, 2013</mixed-citation>
			    <element-citation  publication-type="book">
				    <person-group person-group-type="author">
				    	<name>
                            <surname>Albert</surname>
                            <given-names>W.</given-names>
                        </name>
                        <name>
                            <surname>Tullis</surname>
                            <given-names>T.</given-names>
                        </name>
				    </person-group>
				    <year>2013</year>
                    <source>Measuring the user experience: collecting, analyzing, and presenting usability metrics</source>
                    <publisher-name>Newnes</publisher-name>
			    </element-citation>
            </ref>
            <ref id="r28">
				<mixed-citation>C. Martinie, P. Palanque and M. Winckler, "Structuring and composition mechanisms to address scalability issues in task models," in FIP Conference on Human-Computer Interaction, Berlin, Heidelberg, 2011</mixed-citation>
			    <element-citation  publication-type="book">
				    <person-group person-group-type="author">
				    	<name>
                            <surname>Martinie</surname>
                            <given-names>C.</given-names>
                        </name>
                        <name>
                            <surname>Palanque</surname>
                            <given-names>P.</given-names>
                        </name>
                        <name>
                            <surname>Winckler</surname>
                            <given-names>M.</given-names>
                        </name>
				    </person-group>
				    <year>2011</year>
                    <article-title>Structuring and composition mechanisms to address scalability issues in task models</article-title>
                    <conf-name>FIP Conference on Human-Computer Interaction</conf-name>
                    <conf-loc>Berlin, Heidelberg</conf-loc>
			    </element-citation>
            </ref>
            <ref id="r29">
				<mixed-citation>J. Rubin and D. Chisnell, "How to plan, design, and conduct effective tests," in Handbook of usability testing, Wiley Publishing, Inc, 2008</mixed-citation>
			    <element-citation  publication-type="book">
				    <person-group person-group-type="author">
				    	<name>
                            <surname>Rubin</surname>
                            <given-names>J.</given-names>
                        </name>
                        <name>
                            <surname>Chisnell</surname>
                            <given-names>D.</given-names>
                        </name>
				    </person-group>
				    <year>2008</year>
                    <chapter-title>How to plan, design, and conduct effective tests</chapter-title>
                    <source>Handbook of usability testing</source>
                    <publisher-name>Wiley Publishing, Inc</publisher-name>
			    </element-citation>
            </ref>
            <ref id="r30">
				<mixed-citation>M. G. Piattini Velthuis, F. Ó. García Rubio, J. Garzás Parra and M. F. Genero Bocco, Medición y estimación del software: Técnicas y métodos para mejorar la calidad y productividad del software, Alfaomega, 2008</mixed-citation>
			    <element-citation  publication-type="book">
				    <person-group person-group-type="author">
				    	<name>
                            <surname>Piattini Velthuis</surname>
                            <given-names>M. G.</given-names>
                        </name>
                        <name>
                            <surname>García Rubio</surname>
                            <given-names>F. Ó.</given-names>
                        </name>
                        <name>
                            <surname>Garzás Parra</surname>
                            <given-names>J.</given-names>
                        </name>
                        <name>
                            <surname>Genero Bocco</surname>
                            <given-names>M. F.</given-names>
                        </name>
				    </person-group>
				    <year>2008</year>
                    <source>Medición y estimación del software: Técnicas y métodos para mejorar la calidad y productividad del software</source>
                    <publisher-name>Alfaomega</publisher-name>
			    </element-citation>
            </ref>
            <ref id="r31">
				<mixed-citation>ISO, International Standard ISO/IEC 9126, Software engineering-Product Quality, 2001</mixed-citation>
			    <element-citation  publication-type="book">
				    <person-group person-group-type="author">
				    	<collab>ISO, International Standard</collab>
				    </person-group>
				    <year>2001</year>
                    <source>ISO/IEC 9126, Software engineering-Product Quality</source>
			    </element-citation>
            </ref>
            <ref id="r32">
				<mixed-citation>S. Riihiaho, "Experiences with usability evaluation," Helsinki University of Technology - Laboratory of Information Processing Science., 2000</mixed-citation>
			    <element-citation  publication-type="book">
				    <person-group person-group-type="author">
				    	<name>
                            <surname>Riihiaho</surname>
                            <given-names>S.</given-names>
                        </name>
				    </person-group>
				    <year>2000</year>
                    <source>"Experiences with usability evaluation," Helsinki University of Technology - Laboratory of Information Processing Science</source>
			    </element-citation>
            </ref>
       	</ref-list>
	</back>
</article>