diff --git a/app/components/consent-template005/template.hbs b/app/components/consent-template005/template.hbs new file mode 100644 index 00000000..edee2249 --- /dev/null +++ b/app/components/consent-template005/template.hbs @@ -0,0 +1,156 @@ +
+ + Consent to participate in research: {{experiment.name}} + +
+ +
+ \ No newline at end of file diff --git a/app/components/exp-lookit-images-audio/component.js b/app/components/exp-lookit-images-audio/component.js index da12d3f9..bdeb0841 100644 --- a/app/components/exp-lookit-images-audio/component.js +++ b/app/components/exp-lookit-images-audio/component.js @@ -10,330 +10,10 @@ import isColor from '../../utils/is-color'; let { $ } = Ember; - - -/** - * @module exp-player - * @submodule frames - */ - /** * Frame to display image(s) and play audio, with optional video recording. Options allow * customization for looking time, storybook, forced choice, and reaction time type trials, * including training versions where children (or parents) get feedback about their responses. - * - * This can be used in a variety of ways - for example: - * - * - Display an image for a set amount of time and measure looking time - * - * - Display two images for a set amount of time and play audio for a - * looking-while-listening paradigm - * - * - Show a "storybook page" where you show images and play audio, having the parent/child - * press 'Next' to proceed. If desired, - * images can appear and be highlighted at specific times - * relative to audio. E.g., the audio might say "This [image of Remy appears] is a boy - * named Remy. Remy has a little sister [image of Zenna appears] named Zenna. - * [Remy highlighted] Remy's favorite food is brussel sprouts, but [Zenna highlighted] - * Zenna's favorite food is ice cream. [Remy and Zenna both highlighted] Remy and Zenna - * both love tacos!" - * - * - Play audio asking the child to choose between two images by pointing or answering - * verbally. Show text for the parent about how to help and when to press Next. - * - * - Play audio asking the child to choose between two images, and require one of those - * images to be clicked to proceed (see "choiceRequired" option). 
- * - * - Measure reaction time as the child is asked to choose a particular option on each trial - * (e.g., a central cue image is shown first, then two options at a short delay; the child - * clicks on the one that matches the cue in some way) - * - * - Provide audio and/or text feedback on the child's (or parent's) choice before proceeding, - * either just to make the study a bit more interactive ("Great job, you chose the color BLUE!") - * or for initial training/familiarization to make sure they understand the task. Some - * images can be marked as the "correct" answer and a correct answer required to proceed. - * If you'd like to include some initial training questions before your test questions, - * this is a great way to do it. - * - * In general, the images are displayed in a designated region of the screen with aspect - * ratio 7:4 (1.75 times as wide as it is tall) to standardize display as much as possible - * across different monitors. If you want to display things truly fullscreen, you can - * use `autoProceed` and not provide `parentText` so there's nothing at the bottom, and then - * set `maximizeDisplay` to true. - * - * Webcam recording may be turned on or off; if on, stimuli are not displayed and audio is - * not started until recording begins. (Using the frame-specific `isRecording` property - * is good if you have a smallish number of test trials and prefer to have separate video - * clips for each. For reaction time trials or many short trials, you will likely want - * to use session recording instead - i.e. start the session recording before the first trial - * and end on the last trial - to avoid the short delays related to starting/stopping the video.) - * - * This frame is displayed fullscreen, but is not paused or otherwise disabled if the - * user leaves fullscreen. A button appears prompting the user to return to - * fullscreen mode. - * - * Any number of images may be placed on the screen, and their position - * specified. 
(Aspect ratio will be the same as the original image.) - * - * The examples below show a variety of usages, corresponding to those shown in the video. - * - * image-1: Single image displayed full-screen, maximizing area on monitor, for 8 seconds. - * - * image-2: Single image displayed at specified position, with 'next' button to move on - * - * image-3: Image plus audio, auto-proceeding after audio completes and 4 seconds go by - * - * image-4: Image plus audio, with 'next' button to move on - * - * image-5: Two images plus audio question asking child to point to one of the images, - * demonstrating different timing of image display & highlighting of images during audio - * - * image-6: Three images with audio prompt, family has to click one of two to continue - * - * image-7: Three images with audio prompt, family has to click correct one to continue - - * audio feedback on incorrect answer - * - * image-8: Three images with audio prompt, family has to click correct one to continue - - * text feedback on incorrect answer - * - * - -```json - "frames": { - "image-1": { - "kind": "exp-lookit-images-audio", - "images": [ - { - "id": "cats", - "src": "two_cats.png", - "position": "fill" - } - ], - "baseDir": "https://www.mit.edu/~kimscott/placeholderstimuli/", - "autoProceed": true, - "doRecording": true, - "durationSeconds": 8, - "maximizeDisplay": true - }, - "image-2": { - "kind": "exp-lookit-images-audio", - "images": [ - { - "id": "cats", - "src": "three_cats.JPG", - "top": 10, - "left": 30, - "width": 40 - } - ], - "baseDir": "https://www.mit.edu/~kimscott/placeholderstimuli/", - "autoProceed": false, - "doRecording": true, - "parentTextBlock": { - "text": "Some explanatory text for parents", - "title": "For parents" - } - }, - "image-3": { - "kind": "exp-lookit-images-audio", - "audio": "wheresremy", - "images": [ - { - "id": "remy", - "src": "wheres_remy.jpg", - "position": "fill" - } - ], - "baseDir": "https://www.mit.edu/~kimscott/placeholderstimuli/", - 
"audioTypes": [ - "mp3", - "ogg" - ], - "autoProceed": true, - "doRecording": false, - "durationSeconds": 4, - "parentTextBlock": { - "text": "Some explanatory text for parents", - "title": "For parents" - }, - "showProgressBar": true - }, - "image-4": { - "kind": "exp-lookit-images-audio", - "audio": "peekaboo", - "images": [ - { - "id": "remy", - "src": "peekaboo_remy.jpg", - "position": "fill" - } - ], - "baseDir": "https://www.mit.edu/~kimscott/placeholderstimuli/", - "audioTypes": [ - "mp3", - "ogg" - ], - "autoProceed": false, - "doRecording": false, - "parentTextBlock": { - "text": "Some explanatory text for parents", - "title": "For parents" - } - }, - "image-5": { - "kind": "exp-lookit-images-audio", - "audio": "remyzennaintro", - "images": [ - { - "id": "remy", - "src": "scared_remy.jpg", - "position": "left" - }, - { - "id": "zenna", - "src": "love_zenna.jpg", - "position": "right", - "displayDelayMs": 1500 - } - ], - "baseDir": "https://www.mit.edu/~kimscott/placeholderstimuli/", - "highlights": [ - { - "range": [ - 0, - 1.5 - ], - "imageId": "remy" - }, - { - "range": [ - 1.5, - 3 - ], - "imageId": "zenna" - } - ], - "autoProceed": false, - "doRecording": true, - "parentTextBlock": { - "text": "Some explanatory text for parents", - "title": "For parents" - } - }, - "image-6": { - "kind": "exp-lookit-images-audio", - "audio": "matchremy", - "images": [ - { - "id": "cue", - "src": "happy_remy.jpg", - "position": "center", - "nonChoiceOption": true - }, - { - "id": "option1", - "src": "happy_zenna.jpg", - "position": "left", - "displayDelayMs": 2000 - }, - { - "id": "option2", - "src": "annoyed_zenna.jpg", - "position": "right", - "displayDelayMs": 2000 - } - ], - "baseDir": "https://www.mit.edu/~kimscott/placeholderstimuli/", - "autoProceed": false, - "doRecording": true, - "choiceRequired": true, - "parentTextBlock": { - "text": "Some explanatory text for parents", - "title": "For parents" - }, - "canMakeChoiceBeforeAudioFinished": true - }, - 
"image-7": { - "kind": "exp-lookit-images-audio", - "audio": "matchzenna", - "images": [ - { - "id": "cue", - "src": "sad_zenna.jpg", - "position": "center", - "nonChoiceOption": true - }, - { - "id": "option1", - "src": "surprised_remy.jpg", - "position": "left", - "feedbackAudio": "negativefeedback", - "displayDelayMs": 3500 - }, - { - "id": "option2", - "src": "sad_remy.jpg", - "correct": true, - "position": "right", - "displayDelayMs": 3500 - } - ], - "baseDir": "https://www.mit.edu/~kimscott/placeholderstimuli/", - "autoProceed": false, - "doRecording": true, - "choiceRequired": true, - "parentTextBlock": { - "text": "Some explanatory text for parents", - "title": "For parents" - }, - "correctChoiceRequired": true, - "canMakeChoiceBeforeAudioFinished": false - }, - "image-8": { - "kind": "exp-lookit-images-audio", - "audio": "matchzenna", - "images": [ - { - "id": "cue", - "src": "sad_zenna.jpg", - "position": "center", - "nonChoiceOption": true - }, - { - "id": "option1", - "src": "surprised_remy.jpg", - "position": "left", - "feedbackText": "Try again! Remy looks surprised in that picture. Can you find the picture where he looks sad, like Zenna?", - "displayDelayMs": 3500 - }, - { - "id": "option2", - "src": "sad_remy.jpg", - "correct": true, - "position": "right", - "feedbackText": "Great job! 
Remy is sad in that picture, just like Zenna is sad.", - "displayDelayMs": 3500 - } - ], - "baseDir": "https://www.mit.edu/~kimscott/placeholderstimuli/", - "autoProceed": false, - "doRecording": true, - "choiceRequired": true, - "parentTextBlock": { - "text": "Some explanatory text for parents", - "title": "For parents" - }, - "correctChoiceRequired": true, - "canMakeChoiceBeforeAudioFinished": false - } - } - - * ``` - * @class Exp-lookit-images-audio - * @extends Exp-frame-base - * @uses Full-screen - * @uses Video-record - * @uses Expand-assets */ @@ -363,6 +43,7 @@ export default ExpFrameBaseComponent.extend(FullScreen, VideoRecord, ExpandAsset minDurationAchieved: false, choiceRequired: false, + choiceAllowed: false, correctChoiceRequired: false, correctImageSelected: false, canMakeChoice: true, @@ -380,132 +61,46 @@ export default ExpFrameBaseComponent.extend(FullScreen, VideoRecord, ExpandAsset }, frameSchemaProperties: { - /** - * Whether to do webcam recording (will wait for webcam - * connection before starting audio or showing images if so) - * - * @property {Boolean} doRecording - */ doRecording: { type: 'boolean', - description: 'Whether to do webcam recording (will wait for webcam connection before starting audio if so' + description: 'Whether to do webcam recording (will wait for webcam connection before starting audio if so)' }, - - /** - * Whether to proceed automatically when all conditions are met, vs. enabling - * next button at that point. If true: the next, previous, and replay buttons are - * hidden, and the frame auto-advances after ALL of the following happen - * (a) the audio segment (if any) completes - * (b) the durationSeconds (if any) is achieved - * (c) a choice is made (if required) - * (d) that choice is correct (if required) - * (e) the choice audio (if any) completes - * (f) the choice text (if any) is dismissed - * If false: the next, previous, and replay buttons (as applicable) are displayed. 
- * It becomes possible to press 'next' only once the conditions above are met. - * - * @property {Boolean} autoProceed - * @default false - */ autoProceed: { type: 'boolean', description: 'Whether to proceed automatically after audio (and hide replay/next buttons)', default: false }, - - /** - * Minimum duration of frame in seconds. If set, then it will only - * be possible to proceed to the next frame after both the audio completes AND - * this duration is acheived. - * - * @property {Number} durationSeconds - * @default 0 - */ durationSeconds: { type: 'number', description: 'Minimum duration of frame in seconds', minimum: 0, default: 0 }, - - /** - * [Only used if durationSeconds set] Whether to - * show a progress bar based on durationSeconds in the parent text area. - * - * @property {Boolean} showProgressBar - * @default false - */ showProgressBar: { type: 'boolean', description: 'Whether to show a progress bar based on durationSeconds', default: false }, - - /** - * [Only used if not autoProceed] Whether to - * show a previous button to allow the participant to go to the previous frame - * - * @property {Boolean} showPreviousButton - * @default true - */ showPreviousButton: { type: 'boolean', default: true, description: 'Whether to show a previous button (used only if showing Next button)' }, - - /** - * [Only used if not autoProceed AND if there is audio] Whether to - * show a replay button to allow the participant to replay the audio - * - * @property {Boolean} showReplayButton - * @default false - */ showReplayButton: { type: 'boolean', default: true, description: 'Whether to show a replay button (used only if showing Next button)' }, - - /** - * Whether to have the image display area take up the whole screen if possible. - * This will only apply if (a) there is no parent text and (b) there are no - * control buttons (next, previous, replay) because the frame auto-proceeds. 
- * - * @property {Boolean} maximizeDisplay - * @default false - */ maximizeDisplay: { type: 'boolean', default: false, description: 'Whether to have the image display area take up the whole screen if possible' }, - - /** - * Audio file to play at the start of this frame. - * This can either be an array of {src: 'url', type: 'MIMEtype'} objects, e.g. - * listing equivalent .mp3 and .ogg files, or can be a single string `filename` - * which will be expanded based on `baseDir` and `audioTypes` values (see `audioTypes`). - * - * @property {Object[]} audio - * @default [] - * - */ audio: { anyOf: audioAssetOptions, description: 'Audio to play as this frame begins', default: [] }, - /** - * Text block to display to parent. (Each field is optional) - * - * @property {Object} parentTextBlock - * @param {String} title title to display - * @param {String} text paragraph of text - * @param {Object} css object specifying any css properties - * to apply to this section, and their values - e.g. - * {'color': 'gray', 'font-size': 'large'} - */ parentTextBlock: { type: 'object', properties: { @@ -522,49 +117,6 @@ export default ExpFrameBaseComponent.extend(FullScreen, VideoRecord, ExpandAsset }, default: {} }, - /** - * Array of images to display and information about their placement. For each - * image, you need to specify `src` (image name/URL) and placement (either by - * providing left/width/top values, or by using a `position` preset). - * - * Everything else is optional! This is where you would say that an image should - * be shown at a delay - * - * @property {Object[]} images - * @param {String} id unique ID for this image - * @param {String} src URL of image source. This can be a full - * URL, or relative to baseDir (see baseDir). - * @param {String} alt alt-text for image in case it doesn't load and for - * screen readers - * @param {Number} left left margin, as percentage of story area width. If not provided, - * the image is centered horizontally. 
- * @param {Number} width image width, as percentage of story area width. Note: - * in general only provide one of width and height; the other will be adjusted to - * preserve the image aspect ratio. - * @param {Number} top top margin, as percentage of story area height. If not provided, - * the image is centered vertically. - * @param {Number} height image height, as percentage of story area height. Note: - * in general only provide one of width and height; the other will be adjusted to - * preserve the image aspect ratio. - * @param {String} position one of 'left', 'center', 'right', 'fill' to use presets - * that place the image in approximately the left, center, or right third of - * the screen or to fill the screen as much as possible. - * This overrides left/width/top values if given. - * @param {Boolean} nonChoiceOption [Only used if `choiceRequired` is true] - * whether this should be treated as a non-clickable option (e.g., this is - * a picture of a girl, and the child needs to choose whether the girl has a - * DOG or a CAT) - * @param {Number} displayDelayMs Delay at which to show the image after trial - * start (timing will be relative to any audio or to start of trial if no - * audio). Optional; default is to show images immediately. - * @param {Object[]} feedbackAudio [Only used if `choiceRequired` is true] - * Audio to play upon clicking this image. This can either be an array of - * {src: 'url', type: 'MIMEtype'} objects, e.g. listing equivalent .mp3 and - * .ogg files, or can be a single string `filename` which will be expanded - * based on `baseDir` and `audioTypes` values (see `audioTypes`). - * @param {String} feedbackText [Only used if `choiceRequired` is true] Text - * to display in a dialogue window upon clicking the image. - */ images: { type: 'array', items: { @@ -614,87 +166,37 @@ export default ExpFrameBaseComponent.extend(FullScreen, VideoRecord, ExpandAsset } } }, - /** - * Color of background. 
See https://developer.mozilla.org/en-US/docs/Web/CSS/color_value - * for acceptable syntax: can use color names ('blue', 'red', 'green', etc.), or - * rgb hex values (e.g. '#800080' - include the '#') - * - * @property {String} backgroundColor - * @default 'black' - */ backgroundColor: { type: 'string', description: 'Color of background', default: 'black' }, - /** - * Color of area where images are shown, if different from overall background. - * Defaults to backgroundColor if one is provided. See - * https://developer.mozilla.org/en-US/docs/Web/CSS/color_value - * for acceptable syntax: can use color names ('blue', 'red', 'green', etc.), or - * rgb hex values (e.g. '#800080' - include the '#') - * - * @property {String} pageColor - * @default 'white' - */ pageColor: { type: 'string', - description: 'Color of image area', + description: 'Color of image area if different from background', default: 'white' }, - /** - * Whether this is a frame where the user needs to click to select one of the - * images before proceeding. - * - * @property {Boolean} choiceRequired - * @default false - */ + choiceRequired: { type: 'boolean', - description: 'Whether this is a frame where the user needs to click to select one of the images before proceeding', + description: 'Whether the user needs to click to select one of the images before proceeding', + default: false + }, + choiceAllowed: { + type: 'boolean', + description: 'Whether the user CAN select any of the images', default: false }, - /** - * [Only used if `choiceRequired` is true] Whether the participant has to select - * one of the *correct* images before proceeding. - * - * @property {Boolean} correctChoiceRequired - * @default false - */ correctChoiceRequired: { type: 'boolean', description: 'Whether this is a frame where the user needs to click a correct image before proceeding', default: false }, - /** - * Whether the participant can make a choice before audio finishes. (Only relevant - * if `choiceRequired` is true.) 
- * - * @property {Boolean} canMakeChoiceBeforeAudioFinished - * @default false - */ canMakeChoiceBeforeAudioFinished: { type: 'boolean', description: 'Whether the participant can select an option before audio finishes', default: false }, - /** - * Array representing times when particular images should be highlighted. Each - * element of the array should be of the form {'range': [3.64, 7.83], 'imageId': 'myImageId'}. - * The two `range` values are the start and end times of the highlight in seconds, - * relative to the audio played. The `imageId` corresponds to the `id` of an - * element of `images`. - * - * Highlights can overlap in time. Any that go longer than the audio will just - * be ignored/cut off. - * - * One strategy for generating a bunch of highlights for a longer story is to - * annotate using Audacity and export the labels to get the range values. - * - * @property {Object[]} highlights - * @param {Array} range [startTimeInSeconds, endTimeInSeconds], e.g. [3.64, 7.83] - * @param {String} imageId ID of the image to highlight, corresponding to the `id` field of the element of `images` to highlight - */ highlights: { type: 'array', items: { @@ -987,7 +489,7 @@ export default ExpFrameBaseComponent.extend(FullScreen, VideoRecord, ExpandAsset clickImage(imageId, nonChoiceOption, correct, feedbackText) { // If this is a choice frame and a valid choice and we're allowed to make a choice yet... 
- if (this.get('choiceRequired') && !nonChoiceOption && this.get('canMakeChoice') && !this.get('showingFeedbackDialog')) { + if ((this.get('choiceRequired') || this.get('choiceAllowed')) && !nonChoiceOption && this.get('canMakeChoice') && !this.get('showingFeedbackDialog')) { this.set('finishedAllAudio', true); // Treat as if audio is finished in case making choice before audio finishes - otherwise we never satisfy that criterion /** * When one of the image options is clicked during a choice frame diff --git a/app/components/exp-lookit-images-audio/doc.rst b/app/components/exp-lookit-images-audio/doc.rst index bbf28051..99e1981a 100644 --- a/app/components/exp-lookit-images-audio/doc.rst +++ b/app/components/exp-lookit-images-audio/doc.rst @@ -457,7 +457,7 @@ images [Array | ``[]``] the screen or to fill the screen as much as possible. This overrides left/width/top values if given. :nonChoiceOption: [Boolean] - [Only used if ``choiceRequired`` is true] + [Only used if ``choiceRequired`` or ``choiceAllowed`` is true] whether this should be treated as a non-clickable option (e.g., this is a picture of a girl, and the child needs to choose whether the girl has a DOG or a CAT) @@ -466,13 +466,13 @@ images [Array | ``[]``] start (timing will be relative to any audio or to start of trial if no audio). Optional; default is to show images immediately. :feedbackAudio: [Array or String] - [Only used if ``choiceRequired`` is true] Audio to play upon clicking this image. + [Only used if ``choiceRequired`` or ``choiceAllowed`` is true] Audio to play upon clicking this image. This can either be an array of {src: 'url', type: 'MIMEtype'} objects, e.g. listing equivalent .mp3 and .ogg files, or can be a single string ``filename`` which will be expanded based on ``baseDir`` and ``audioTypes`` values (see ``audioTypes``). 
:feedbackText: [String] - [Only used if ``choiceRequired`` is true] Text + [Only used if ``choiceRequired`` or ``choiceAllowed`` is true] Text to display in a dialogue window upon clicking the image. backgroundColor [String | ``'black'``] @@ -487,9 +487,12 @@ pageColor [String | ``'white'``] for acceptable syntax: can use color names ('blue', 'red', 'green', etc.), or rgb hex values (e.g. '#800080' - include the '#') +choiceAllowed [Boolean | ``false``] + Whether the user may click on images to select them. + choiceRequired [Boolean | ``false``] - Whether this is a frame where the user needs to click to select one of the - images before proceeding. + Whether the user must click to select one of the images before proceeding (overrides + ``choiceAllowed`` if ``choiceAllowed`` is false) correctChoiceRequired [Boolean | ``false``] [Only used if `choiceRequired` is true] Whether the participant has to select @@ -497,7 +500,7 @@ correctChoiceRequired [Boolean | ``false``] canMakeChoiceBeforeAudioFinished [Boolean | ``false``] Whether the participant can make a choice before audio finishes. (Only relevant - if `choiceRequired` is true.) + if ``choiceRequired`` or ``choiceAllowed`` is true.) highlights [Array | ``[]``] Array representing times when particular images should be highlighted. Each @@ -872,4 +875,4 @@ and a "correct" response can be required to move on. feedback as well as requiring a correct choice to move on. 5. If you were using a ``backgroundImage``, turn it into the first image in your image list, with - ``"left": 0, "width": "100", "top": 0, "height": 100``. \ No newline at end of file + ``"left": 0, "width": "100", "top": 0, "height": 100``. diff --git a/app/components/exp-lookit-images-audio/template.hbs b/app/components/exp-lookit-images-audio/template.hbs index f78ce386..b2c6c83e 100644 --- a/app/components/exp-lookit-images-audio/template.hbs +++ b/app/components/exp-lookit-images-audio/template.hbs @@ -14,7 +14,7 @@ {{#each images_parsed as |image|}}
- {{image.alt}} + {{image.alt}}
diff --git a/app/conf.py b/app/conf.py index 43bbeb34..49f1ba44 100644 --- a/app/conf.py +++ b/app/conf.py @@ -21,10 +21,6 @@ copyright = '2020, MIT' author = 'MIT' -# The full version, including alpha/beta/rc tags -release = '1.3.1' - - # -- General configuration --------------------------------------------------- # Add any Sphinx extension module names here, as strings. They can be @@ -59,4 +55,6 @@ # These paths are either relative to html_static_path # or fully qualified paths (eg. https://...) -html_css_files = ['elf.css'] \ No newline at end of file +html_css_files = ['elf.css'] + +master_doc = 'index' \ No newline at end of file diff --git a/app/styles/components/exp-lookit-video-consent.scss b/app/styles/components/exp-lookit-video-consent.scss index 2519a786..573dbc02 100644 --- a/app/styles/components/exp-lookit-video-consent.scss +++ b/app/styles/components/exp-lookit-video-consent.scss @@ -171,4 +171,8 @@ text-align: right; } + h2 { + font-size: large; + } + } diff --git a/images/Exp-lookit-composite-video-trial.png b/images/Exp-lookit-composite-video-trial.png deleted file mode 100644 index 9113fa2a..00000000 Binary files a/images/Exp-lookit-composite-video-trial.png and /dev/null differ diff --git a/images/Exp-lookit-geometry-alternation-open.png b/images/Exp-lookit-geometry-alternation-open.png deleted file mode 100644 index d3001a28..00000000 Binary files a/images/Exp-lookit-geometry-alternation-open.png and /dev/null differ diff --git a/images/Exp-lookit-geometry-alternation.png b/images/Exp-lookit-geometry-alternation.png deleted file mode 100644 index 6203c370..00000000 Binary files a/images/Exp-lookit-geometry-alternation.png and /dev/null differ diff --git a/images/Exp-lookit-preferential-looking.png b/images/Exp-lookit-preferential-looking.png deleted file mode 100644 index aa6b9a8c..00000000 Binary files a/images/Exp-lookit-preferential-looking.png and /dev/null differ diff --git a/images/Exp-lookit-preview-explanation.png 
b/images/Exp-lookit-preview-explanation.png deleted file mode 100644 index c2fbc332..00000000 Binary files a/images/Exp-lookit-preview-explanation.png and /dev/null differ diff --git a/images/Exp-lookit-story-page.png b/images/Exp-lookit-story-page.png deleted file mode 100644 index d649232c..00000000 Binary files a/images/Exp-lookit-story-page.png and /dev/null differ diff --git a/images/Exp-video-preview.png b/images/Exp-video-preview.png deleted file mode 100644 index 94059ca3..00000000 Binary files a/images/Exp-video-preview.png and /dev/null differ diff --git a/package.json b/package.json index 8987da70..3408da45 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "ember-lookit-frameplayer", - "version": "v2.0.0", + "version": "v2.1.0", "description": "Ember Frame Player", "private": true, "directories": { diff --git a/tests/integration/components/exp-frame-base-test.js b/tests/integration/components/exp-frame-base-test.js index 71fb0c7e..298bc469 100644 --- a/tests/integration/components/exp-frame-base-test.js +++ b/tests/integration/components/exp-frame-base-test.js @@ -1,22 +1,8 @@ import Ember from 'ember'; - -import {moduleForComponent, skip} from 'ember-qunit'; - -import sinon from 'sinon'; - -/** - * COPIED FROM EXP-ADDONS - * These tests work when inside an app, but not when they are part of an addon. - * There may be some weird rules for addons/ registry/ dynamic templates to be - * resolved before we can make this work - */ - -/* -// yarn add ember-cli-htmlbars-inline-precompile leads to ember test failing with: -// The "path" argument must be of type string. 
Received type undefined +import DS from 'ember-data'; +import {moduleForComponent, test} from 'ember-qunit'; import hbs from 'htmlbars-inline-precompile'; - -// The component doesn't actually have a template, so generate one that can be used to trigger actions +import sinon from 'sinon'; const BasicTemplate = hbs` `; @@ -39,9 +25,8 @@ moduleForComponent('exp-frame-base', 'Integration | Component | exp frame base', this.errorSpy = errorSpy; } }); -*/ -skip('it shows an error and does not advance when it encounters an adapter 400 error', function (assert) { +test('it shows an error and still advances when it encounters an adapter 400 error', function (assert) { assert.expect(3); const nextAction = sinon.spy(); @@ -60,11 +45,11 @@ skip('it shows an error and does not advance when it encounters an adapter 400 e // won't propagate up. this.$('#go-next').click(); assert.ok(saveHandler.calledOnce, 'Clicking next button should attempt to save the frame'); - assert.notOk(nextAction.calledOnce, 'When save fails, the passed-in next action should not be called'); + assert.ok(nextAction.calledOnce, 'When save fails, the passed-in next action is still called'); assert.ok(this.errorSpy.calledOnce, 'When save fails, a message should be presented to the user'); }); -skip('Moves to the next frame when save is successful', function (assert) { +test('Moves to the next frame when save is successful', function (assert) { assert.expect(3); const nextAction = sinon.spy(); diff --git a/tests/integration/components/exp-lookit-survey-test.js b/tests/integration/components/exp-lookit-survey-test.js new file mode 100644 index 00000000..9438a7bf --- /dev/null +++ b/tests/integration/components/exp-lookit-survey-test.js @@ -0,0 +1,77 @@ +import Ember from 'ember'; +import DS from 'ember-data'; +import {moduleForComponent} from 'ember-qunit'; +import hbs from 'htmlbars-inline-precompile'; +import sinon from 'sinon'; +import { setupRenderingTest } from 'ember-qunit'; +import { module, test } 
from 'qunit'; + + + +module('Integration | Component | exp lookit survey', function(hooks) { + setupRenderingTest(hooks); + + test('Exp-lookit-survey frame renders', async function (assert) { + + assert.expect(2); + let formSchema = { + schema: { + type: "object", + title: "Tell us about your pet!", + properties: { + "age": { + "type": "integer", + "title": "Age", + "maximum": 200, + "minimum": 0, + "required": true + }, + "name": { + "type": "string", + "title": "Name", + "required": true + }, + "species": { + "enum": [ + "dog", + "cat", + "fish", + "bird", + "raccoon" + ], + "type": "string", + "title": "What type of animal?", + "default": "" + } + } + }, + options: { + fields: { + age: { + "numericEntry": true + }, + name: { + "placeholder": "a name..." + }, + species: { + "type": "radio", + "message": "Seriously, what species??", + "validator": "required-field" + } + } + } + }; + + this.set('formSchema', formSchema); + await this.render( + hbs`{{exp-lookit-survey + nextButtonText="Moving on" + formSchema=formSchema + }}` + ); + + // Note: not all questions appear to be rendered at this time. May need to wait for that separately... 
+ assert.equal(this.element.querySelector('legend').textContent.trim(), 'Tell us about your pet!'); + assert.equal(this.element.querySelector('button').textContent.trim(), 'Moving on'); + }); +}); diff --git a/tests/unit/utils/is-color-test.js b/tests/unit/utils/is-color-test.js new file mode 100644 index 00000000..d80918b6 --- /dev/null +++ b/tests/unit/utils/is-color-test.js @@ -0,0 +1,23 @@ +import { colorSpecToRgbaArray, isColor } from '../../../utils/is-color'; +import { module, test} from 'qunit'; + + +module('Unit | Utility | is color'); + +test('Valid CSS color syntax recognized', function(assert) { + assert.ok(isColor('black'), 'Black is a color'); + assert.notOk(isColor('blark'), 'Blark is not a color'); + + assert.ok(isColor('#cc00ff'), '#cc00ff is a color'); + assert.notOk(isColor('cc00ff'), 'cc00ff is not a color'); +}); + +test('CSS color specs correctly turned into RGBA arrays', function(assert) { + let black = new Uint8ClampedArray([0, 0, 0, 255]); + assert.deepEqual(colorSpecToRgbaArray('black'), black); + assert.deepEqual(colorSpecToRgbaArray('#000'), black); + + let limegreen = new Uint8ClampedArray([0, 255, 0, 255]); + assert.deepEqual(colorSpecToRgbaArray('lime'), limegreen); + assert.deepEqual(colorSpecToRgbaArray('#0f0'), limegreen); +}); \ No newline at end of file