💄 autopep8

Yuri Bochkarev 2018-06-24 00:10:22 +03:00
parent 3df019a661
commit d2467a7ae2
2 changed files with 401 additions and 317 deletions
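This commit is a formatting-only pass, so the hunks below change indentation and line wrapping rather than behavior. As a rough illustration (not the author's recorded command), a pass like this can be reproduced with autopep8's Python API; the file paths and the max_line_length option in the sketch are assumptions:

# Sketch only: the commit does not record the actual autopep8 invocation,
# so the paths and options below are assumptions.
import autopep8

for path in ('coursera/coursera_dl.py', 'coursera/test/test_api.py'):  # assumed paths
    with open(path) as f:
        source = f.read()

    # fix_code() applies PEP 8 fixes; the re-wrapping of over-long
    # continuation lines seen in the hunks below is one of them.
    fixed = autopep8.fix_code(source, options={'max_line_length': 79})

    with open(path, 'w') as f:
        f.write(fixed)

The equivalent command-line form would be something like autopep8 --in-place --max-line-length 79 over the same files, again assuming the default set of fixes.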


@@ -47,338 +47,388 @@ def parse_args(args=None):
# Basic options
group_basic = parser.add_argument_group('Basic options')
group_basic.add_argument('class_names',
action='store',
nargs='*',
help='name(s) of the class(es) (e.g. "ml-005")')
group_basic.add_argument(
'class_names',
action='store',
nargs='*',
help='name(s) of the class(es) (e.g. "ml-005")')
group_basic.add_argument('-u',
'--username',
dest='username',
action='store',
default=None,
help='username (email) that you use to login to Coursera')
group_basic.add_argument(
'-u',
'--username',
dest='username',
action='store',
default=None,
help='username (email) that you use to login to Coursera')
group_basic.add_argument('-p',
'--password',
dest='password',
action='store',
default=None,
help='coursera password')
group_basic.add_argument(
'-p',
'--password',
dest='password',
action='store',
default=None,
help='coursera password')
group_basic.add_argument('--jobs',
dest='jobs',
action='store',
default=1,
type=int,
help='number of parallel jobs to use for '
'downloading resources. (Default: 1)')
group_basic.add_argument(
'--jobs',
dest='jobs',
action='store',
default=1,
type=int,
help='number of parallel jobs to use for '
'downloading resources. (Default: 1)')
group_basic.add_argument('--download-delay',
dest='download_delay',
action='store',
default=60,
type=int,
help='number of seconds to wait before downloading '
'next course. (Default: 60)')
group_basic.add_argument(
'--download-delay',
dest='download_delay',
action='store',
default=60,
type=int,
help='number of seconds to wait before downloading '
'next course. (Default: 60)')
group_basic.add_argument('-b', # FIXME: kill this one-letter option
'--preview',
dest='preview',
action='store_true',
default=False,
help='get videos from preview pages. (Default: False)')
group_basic.add_argument(
'-b', # FIXME: kill this one-letter option
'--preview',
dest='preview',
action='store_true',
default=False,
help='get videos from preview pages. (Default: False)')
group_basic.add_argument('--path',
dest='path',
action='store',
default='',
help='path to where to save the file. (Default: current directory)')
group_basic.add_argument(
'--path',
dest='path',
action='store',
default='',
help='path to where to save the file. (Default: current directory)')
group_basic.add_argument('-sl', # FIXME: deprecate this option
'--subtitle-language',
dest='subtitle_language',
action='store',
default='all',
help='Choose language to download subtitles and transcripts. (Default: all)'
'Use special value "all" to download all available.'
'To download subtitles and transcripts of multiple languages,'
'use comma(s) (without spaces) to seperate the names of the languages, i.e., "en,zh-CN".'
'To download subtitles and transcripts of alternative language(s) '
'if only the current language is not available,'
'put an "|<lang>" for each of the alternative languages after '
'the current language, i.e., "en|fr,zh-CN|zh-TW|de", and make sure the parameter are wrapped with '
'quotes when "|" presents.'
)
group_basic.add_argument(
'-sl', # FIXME: deprecate this option
'--subtitle-language',
dest='subtitle_language',
action='store',
default='all',
help='Choose language to download subtitles and transcripts.'
'(Default: all) Use special value "all" to download all available.'
'To download subtitles and transcripts of multiple languages,'
'use comma(s) (without spaces) to seperate the names of the languages,'
' i.e., "en,zh-CN".'
'To download subtitles and transcripts of alternative language(s) '
'if only the current language is not available,'
'put an "|<lang>" for each of the alternative languages after '
'the current language, i.e., "en|fr,zh-CN|zh-TW|de", and make sure '
'the parameter are wrapped with quotes when "|" presents.'
)
# Selection of material to download
group_material = parser.add_argument_group(
'Selection of material to download')
group_material.add_argument('--only-syllabus',
dest='only_syllabus',
action='store_true',
default=False,
help='download only syllabus, skip course content. '
'(Default: False)')
group_material.add_argument(
'--only-syllabus',
dest='only_syllabus',
action='store_true',
default=False,
help='download only syllabus, skip course content. '
'(Default: False)')
group_material.add_argument('--download-quizzes',
dest='download_quizzes',
action='store_true',
default=False,
help='download quiz and exam questions. (Default: False)')
group_material.add_argument(
'--download-quizzes',
dest='download_quizzes',
action='store_true',
default=False,
help='download quiz and exam questions. (Default: False)')
group_material.add_argument('--download-notebooks',
dest='download_notebooks',
action='store_true',
default=False,
help='download Python Jupyther Notebooks. (Default: False)')
group_material.add_argument(
'--download-notebooks',
dest='download_notebooks',
action='store_true',
default=False,
help='download Python Jupyther Notebooks. (Default: False)')
group_material.add_argument('--about', # FIXME: should be --about-course
dest='about',
action='store_true',
default=False,
help='download "about" metadata. (Default: False)')
group_material.add_argument(
'--about', # FIXME: should be --about-course
dest='about',
action='store_true',
default=False,
help='download "about" metadata. (Default: False)')
group_material.add_argument('-f',
'--formats',
dest='file_formats',
action='store',
default='all',
help='file format extensions to be downloaded in'
' quotes space separated, e.g. "mp4 pdf" '
'(default: special value "all")')
group_material.add_argument(
'-f',
'--formats',
dest='file_formats',
action='store',
default='all',
help='file format extensions to be downloaded in'
' quotes space separated, e.g. "mp4 pdf" '
'(default: special value "all")')
group_material.add_argument('--ignore-formats',
dest='ignore_formats',
action='store',
default=None,
help='file format extensions of resources to ignore'
' (default: None)')
group_material.add_argument(
'--ignore-formats',
dest='ignore_formats',
action='store',
default=None,
help='file format extensions of resources to ignore'
' (default: None)')
group_material.add_argument('-sf', # FIXME: deprecate this option
'--section_filter',
dest='section_filter',
action='store',
default=None,
help='only download sections which contain this'
' regex (default: disabled)')
group_material.add_argument(
'-sf', # FIXME: deprecate this option
'--section_filter',
dest='section_filter',
action='store',
default=None,
help='only download sections which contain this'
' regex (default: disabled)')
group_material.add_argument('-lf', # FIXME: deprecate this option
'--lecture_filter',
dest='lecture_filter',
action='store',
default=None,
help='only download lectures which contain this regex'
' (default: disabled)')
group_material.add_argument(
'-lf', # FIXME: deprecate this option
'--lecture_filter',
dest='lecture_filter',
action='store',
default=None,
help='only download lectures which contain this regex'
' (default: disabled)')
group_material.add_argument('-rf', # FIXME: deprecate this option
'--resource_filter',
dest='resource_filter',
action='store',
default=None,
help='only download resources which match this regex'
' (default: disabled)')
group_material.add_argument(
'-rf', # FIXME: deprecate this option
'--resource_filter',
dest='resource_filter',
action='store',
default=None,
help='only download resources which match this regex'
' (default: disabled)')
group_material.add_argument('--video-resolution',
dest='video_resolution',
action='store',
default='540p',
help='video resolution to download (default: 540p); '
'only valid for on-demand courses; '
'only values allowed: 360p, 540p, 720p')
group_material.add_argument(
'--video-resolution',
dest='video_resolution',
action='store',
default='540p',
help='video resolution to download (default: 540p); '
'only valid for on-demand courses; '
'only values allowed: 360p, 540p, 720p')
group_material.add_argument('--disable-url-skipping',
dest='disable_url_skipping',
action='store_true',
default=False,
help='disable URL skipping, all URLs will be '
'downloaded (default: False)')
group_material.add_argument(
'--disable-url-skipping',
dest='disable_url_skipping',
action='store_true',
default=False,
help='disable URL skipping, all URLs will be '
'downloaded (default: False)')
# Parameters related to external downloaders
group_external_dl = parser.add_argument_group('External downloaders')
group_external_dl.add_argument('--wget',
dest='wget',
action='store',
nargs='?',
const='wget',
default=None,
help='use wget for downloading,'
'optionally specify wget bin')
group_external_dl.add_argument('--curl',
dest='curl',
action='store',
nargs='?',
const='curl',
default=None,
help='use curl for downloading,'
' optionally specify curl bin')
group_external_dl.add_argument('--aria2',
dest='aria2',
action='store',
nargs='?',
const='aria2c',
default=None,
help='use aria2 for downloading,'
' optionally specify aria2 bin')
group_external_dl.add_argument('--axel',
dest='axel',
action='store',
nargs='?',
const='axel',
default=None,
help='use axel for downloading,'
' optionally specify axel bin')
group_external_dl.add_argument('--downloader-arguments',
dest='downloader_arguments',
default='',
help='additional arguments passed to the'
' downloader')
group_external_dl.add_argument(
'--wget',
dest='wget',
action='store',
nargs='?',
const='wget',
default=None,
help='use wget for downloading,'
'optionally specify wget bin')
parser.add_argument('--list-courses',
dest='list_courses',
action='store_true',
default=False,
help='list course names (slugs) and quit. Listed '
'course names can be put into program arguments')
group_external_dl.add_argument(
'--curl',
dest='curl',
action='store',
nargs='?',
const='curl',
default=None,
help='use curl for downloading,'
' optionally specify curl bin')
parser.add_argument('--resume',
dest='resume',
action='store_true',
default=False,
help='resume incomplete downloads (default: False)')
group_external_dl.add_argument(
'--aria2',
dest='aria2',
action='store',
nargs='?',
const='aria2c',
default=None,
help='use aria2 for downloading,'
' optionally specify aria2 bin')
parser.add_argument('-o',
'--overwrite',
dest='overwrite',
action='store_true',
default=False,
help='whether existing files should be overwritten'
' (default: False)')
group_external_dl.add_argument(
'--axel',
dest='axel',
action='store',
nargs='?',
const='axel',
default=None,
help='use axel for downloading,'
' optionally specify axel bin')
parser.add_argument('--verbose-dirs',
dest='verbose_dirs',
action='store_true',
default=False,
help='include class name in section directory name')
group_external_dl.add_argument(
'--downloader-arguments',
dest='downloader_arguments',
default='',
help='additional arguments passed to the'
' downloader')
parser.add_argument('--quiet',
dest='quiet',
action='store_true',
default=False,
help='omit as many messages as possible'
' (only printing errors)')
parser.add_argument(
'--list-courses',
dest='list_courses',
action='store_true',
default=False,
help='list course names (slugs) and quit. Listed '
'course names can be put into program arguments')
parser.add_argument('-r',
'--reverse',
dest='reverse',
action='store_true',
default=False,
help='download sections in reverse order')
parser.add_argument(
'--resume',
dest='resume',
action='store_true',
default=False,
help='resume incomplete downloads (default: False)')
parser.add_argument('--combined-section-lectures-nums',
dest='combined_section_lectures_nums',
action='store_true',
default=False,
help='include lecture and section name in final files')
parser.add_argument(
'-o',
'--overwrite',
dest='overwrite',
action='store_true',
default=False,
help='whether existing files should be overwritten'
' (default: False)')
parser.add_argument('--unrestricted-filenames',
dest='unrestricted_filenames',
action='store_true',
default=False,
help='Do not limit filenames to be ASCII-only')
parser.add_argument(
'--verbose-dirs',
dest='verbose_dirs',
action='store_true',
default=False,
help='include class name in section directory name')
parser.add_argument(
'--quiet',
dest='quiet',
action='store_true',
default=False,
help='omit as many messages as possible'
' (only printing errors)')
parser.add_argument(
'-r',
'--reverse',
dest='reverse',
action='store_true',
default=False,
help='download sections in reverse order')
parser.add_argument(
'--combined-section-lectures-nums',
dest='combined_section_lectures_nums',
action='store_true',
default=False,
help='include lecture and section name in final files')
parser.add_argument(
'--unrestricted-filenames',
dest='unrestricted_filenames',
action='store_true',
default=False,
help='Do not limit filenames to be ASCII-only')
# Advanced authentication
group_adv_auth = parser.add_argument_group(
'Advanced authentication options')
group_adv_auth.add_argument('-c',
'--cookies_file',
dest='cookies_file',
action='store',
default=None,
help='full path to the cookies.txt file')
group_adv_auth.add_argument(
'-c',
'--cookies_file',
dest='cookies_file',
action='store',
default=None,
help='full path to the cookies.txt file')
group_adv_auth.add_argument('-n',
'--netrc',
dest='netrc',
nargs='?',
action='store',
const=True,
default=False,
help='use netrc for reading passwords, uses default'
' location if no path specified')
group_adv_auth.add_argument(
'-n',
'--netrc',
dest='netrc',
nargs='?',
action='store',
const=True,
default=False,
help='use netrc for reading passwords, uses default'
' location if no path specified')
group_adv_auth.add_argument('-k',
'--keyring',
dest='use_keyring',
action='store_true',
default=False,
help='use keyring provided by operating system to '
'save and load credentials')
group_adv_auth.add_argument(
'-k',
'--keyring',
dest='use_keyring',
action='store_true',
default=False,
help='use keyring provided by operating system to '
'save and load credentials')
group_adv_auth.add_argument('--clear-cache',
dest='clear_cache',
action='store_true',
default=False,
help='clear cached cookies')
group_adv_auth.add_argument(
'--clear-cache',
dest='clear_cache',
action='store_true',
default=False,
help='clear cached cookies')
# Advanced miscellaneous options
group_adv_misc = parser.add_argument_group(
'Advanced miscellaneous options')
group_adv_misc.add_argument('--hook',
dest='hooks',
action='append',
default=[],
help='hooks to run when finished')
group_adv_misc.add_argument(
'--hook',
dest='hooks',
action='append',
default=[],
help='hooks to run when finished')
group_adv_misc.add_argument('-pl',
'--playlist',
dest='playlist',
action='store_true',
default=False,
help='generate M3U playlists for course weeks')
group_adv_misc.add_argument(
'-pl',
'--playlist',
dest='playlist',
action='store_true',
default=False,
help='generate M3U playlists for course weeks')
group_adv_misc.add_argument('--mathjax-cdn',
dest='mathjax_cdn_url',
default='https://cdn.mathjax.org/mathjax/latest/MathJax.js',
help='the cdn address of MathJax.js'
)
group_adv_misc.add_argument(
'--mathjax-cdn',
dest='mathjax_cdn_url',
default='https://cdn.mathjax.org/mathjax/latest/MathJax.js',
help='the cdn address of MathJax.js'
)
# Debug options
group_debug = parser.add_argument_group('Debugging options')
group_debug.add_argument('--skip-download',
dest='skip_download',
action='store_true',
default=False,
help='for debugging: skip actual downloading of files')
group_debug.add_argument(
'--skip-download',
dest='skip_download',
action='store_true',
default=False,
help='for debugging: skip actual downloading of files')
group_debug.add_argument('--debug',
dest='debug',
action='store_true',
default=False,
help='print lots of debug information')
group_debug.add_argument(
'--debug',
dest='debug',
action='store_true',
default=False,
help='print lots of debug information')
group_debug.add_argument('--cache-syllabus',
dest='cache_syllabus',
action='store_true',
default=False,
help='cache course syllabus into a file')
group_debug.add_argument(
'--cache-syllabus',
dest='cache_syllabus',
action='store_true',
default=False,
help='cache course syllabus into a file')
group_debug.add_argument('--version',
dest='version',
action='store_true',
default=False,
help='display version and exit')
group_debug.add_argument(
'--version',
dest='version',
action='store_true',
default=False,
help='display version and exit')
group_debug.add_argument('-l', # FIXME: remove short option from rarely used ones
'--process_local_page',
dest='local_page',
help='uses or creates local cached version of syllabus'
' page')
group_debug.add_argument(
'-l', # FIXME: remove short option from rarely used ones
'--process_local_page',
dest='local_page',
help='uses or creates local cached version of syllabus'
' page')
# Final parsing of the options
args = parser.parse_args(args)
@@ -403,7 +453,8 @@ def parse_args(args=None):
# show version?
if args.version:
# we use print (not logging) function because version may be used
# by some external script while logging may output excessive information
# by some external script while logging may output excessive
# information
print(__version__)
sys.exit(0)


@@ -132,7 +132,8 @@ def test_extract_links_from_programming_immediate_instructions_http_error(
@patch('coursera.api.get_page')
def test_ondemand_programming_supplement_no_instructions(get_page, course):
no_instructions = slurp_fixture('json/supplement-programming-no-instructions.json')
no_instructions = slurp_fixture(
'json/supplement-programming-no-instructions.json')
get_page.return_value = json.loads(no_instructions)
output = course.extract_links_from_programming('0')
@@ -171,7 +172,8 @@ def test_ondemand_from_programming_immediate_instructions_no_instructions(
@patch('coursera.api.get_page')
def test_ondemand_programming_supplement_empty_instructions(get_page, course):
empty_instructions = slurp_fixture('json/supplement-programming-empty-instructions.json')
empty_instructions = slurp_fixture(
'json/supplement-programming-empty-instructions.json')
get_page.return_value = json.loads(empty_instructions)
output = course.extract_links_from_programming('0')
@@ -186,7 +188,7 @@ def test_ondemand_programming_supplement_empty_instructions(get_page, course):
@patch('coursera.api.get_page')
def test_ondemand_programming_immediate_instructions_empty_instructions(
get_page, course):
get_page, course):
empty_instructions = slurp_fixture(
'json/supplement-programming-immediate-instructions-empty-instructions.json')
get_page.return_value = json.loads(empty_instructions)
@@ -207,10 +209,10 @@ def test_ondemand_programming_supplement_one_asset(get_page, course):
one_asset_url = slurp_fixture('json/asset-urls-one.json')
asset_json = json.loads(one_asset_url)
get_page.side_effect = [json.loads(one_asset_tag),
json.loads(one_asset_url)]
json.loads(one_asset_url)]
expected_output = {'pdf': [(asset_json['elements'][0]['url'],
'statement-pca')]}
'statement-pca')]}
output = course.extract_links_from_programming('0')
# Make sure that SOME html content has been extracted, but remove
@@ -238,14 +240,15 @@ def test_extract_references_poll(get_page, course):
@patch('coursera.api.get_page')
def test_ondemand_programming_immediate_instructions_one_asset(get_page, course):
one_asset_tag = slurp_fixture('json/supplement-programming-immediate-instructions-one-asset.json')
one_asset_tag = slurp_fixture(
'json/supplement-programming-immediate-instructions-one-asset.json')
one_asset_url = slurp_fixture('json/asset-urls-one.json')
asset_json = json.loads(one_asset_url)
get_page.side_effect = [json.loads(one_asset_tag),
json.loads(one_asset_url)]
json.loads(one_asset_url)]
expected_output = {'pdf': [(asset_json['elements'][0]['url'],
'statement-pca')]}
'statement-pca')]}
output = course.extract_links_from_programming_immediate_instructions('0')
# Make sure that SOME html content has been extracted, but remove
@@ -259,12 +262,14 @@ def test_ondemand_programming_immediate_instructions_one_asset(get_page, course)
@patch('coursera.api.get_page')
def test_ondemand_programming_supplement_three_assets(get_page, course):
three_assets_tag = slurp_fixture('json/supplement-programming-three-assets.json')
three_assets_tag = slurp_fixture(
'json/supplement-programming-three-assets.json')
three_assets_url = slurp_fixture('json/asset-urls-three.json')
get_page.side_effect = [json.loads(three_assets_tag),
json.loads(three_assets_url)]
json.loads(three_assets_url)]
expected_output = json.loads(slurp_fixture('json/supplement-three-assets-output.json'))
expected_output = json.loads(slurp_fixture(
'json/supplement-three-assets-output.json'))
output = course.extract_links_from_programming('0')
output = json.loads(json.dumps(output))
@@ -279,12 +284,15 @@ def test_ondemand_programming_supplement_three_assets(get_page, course):
@patch('coursera.api.get_page')
def test_extract_links_from_lecture_assets_typename_asset(get_page, course):
open_course_assets_reply = slurp_fixture('json/supplement-open-course-assets-reply.json')
api_assets_v1_reply = slurp_fixture('json/supplement-api-assets-v1-reply.json')
open_course_assets_reply = slurp_fixture(
'json/supplement-open-course-assets-reply.json')
api_assets_v1_reply = slurp_fixture(
'json/supplement-api-assets-v1-reply.json')
get_page.side_effect = [json.loads(open_course_assets_reply),
json.loads(api_assets_v1_reply)]
json.loads(api_assets_v1_reply)]
expected_output = json.loads(slurp_fixture('json/supplement-extract-links-from-lectures-output.json'))
expected_output = json.loads(slurp_fixture(
'json/supplement-extract-links-from-lectures-output.json'))
assets = ['giAxucdaEeWJTQ5WTi8YJQ']
output = course._extract_links_from_lecture_assets(assets)
output = json.loads(json.dumps(output))
@@ -298,14 +306,20 @@ def test_extract_links_from_lecture_assets_typname_url_and_asset(get_page, cours
links both from typename == 'asset' and == 'url'.
"""
get_page.side_effect = [
json.loads(slurp_fixture('json/supplement-open-course-assets-typename-url-reply-1.json')),
json.loads(slurp_fixture('json/supplement-open-course-assets-typename-url-reply-2.json')),
json.loads(slurp_fixture('json/supplement-open-course-assets-typename-url-reply-3.json')),
json.loads(slurp_fixture('json/supplement-open-course-assets-typename-url-reply-4.json')),
json.loads(slurp_fixture('json/supplement-open-course-assets-typename-url-reply-5.json')),
json.loads(slurp_fixture(
'json/supplement-open-course-assets-typename-url-reply-1.json')),
json.loads(slurp_fixture(
'json/supplement-open-course-assets-typename-url-reply-2.json')),
json.loads(slurp_fixture(
'json/supplement-open-course-assets-typename-url-reply-3.json')),
json.loads(slurp_fixture(
'json/supplement-open-course-assets-typename-url-reply-4.json')),
json.loads(slurp_fixture(
'json/supplement-open-course-assets-typename-url-reply-5.json')),
]
expected_output = json.loads(slurp_fixture('json/supplement-extract-links-from-lectures-url-asset-output.json'))
expected_output = json.loads(slurp_fixture(
'json/supplement-extract-links-from-lectures-url-asset-output.json'))
assets = ['Yry0spSKEeW8oA5fR3afVQ',
'kMQyUZSLEeWj-hLVp2Pm8w',
'xkAloZmJEeWjYA4jOOgP8Q']
@@ -322,7 +336,8 @@ def test_list_courses(get_page, course):
get_page.side_effect = [
json.loads(slurp_fixture('json/list-courses-input.json'))
]
expected_output = json.loads(slurp_fixture('json/list-courses-output.json'))
expected_output = json.loads(
slurp_fixture('json/list-courses-output.json'))
expected_output = expected_output['courses']
output = course.list_courses()
assert expected_output == output
@@ -344,12 +359,13 @@ def test_list_courses(get_page, course):
'en,zh-CN|zh-TW', "None"),
]
)
def test_extract_subtitles_from_video_dom(input_filename,output_filename,subtitle_language, video_id):
def test_extract_subtitles_from_video_dom(input_filename, output_filename, subtitle_language, video_id):
video_dom = json.loads(slurp_fixture('json/%s' % input_filename))
expected_output = json.loads(slurp_fixture('json/%s' % output_filename))
course = api.CourseraOnDemand(
session=Mock(cookies={}), course_id='0', course_name='test_course')
actual_output = course._extract_subtitles_from_video_dom(video_dom, subtitle_language, video_id)
actual_output = course._extract_subtitles_from_video_dom(
video_dom, subtitle_language, video_id)
actual_output = json.loads(json.dumps(actual_output))
assert actual_output == expected_output
@@ -357,22 +373,29 @@ def test_extract_subtitles_from_video_dom(input_filename,output_filename,subtitl
@pytest.mark.parametrize(
"input_filename,output_filename", [
('empty-input.json', 'empty-output.txt'),
('answer-text-replaced-with-span-input.json', 'answer-text-replaced-with-span-output.txt'),
('question-type-textExactMatch-input.json', 'question-type-textExactMatch-output.txt'),
('answer-text-replaced-with-span-input.json',
'answer-text-replaced-with-span-output.txt'),
('question-type-textExactMatch-input.json',
'question-type-textExactMatch-output.txt'),
('question-type-regex-input.json', 'question-type-regex-output.txt'),
('question-type-mathExpression-input.json', 'question-type-mathExpression-output.txt'),
('question-type-mathExpression-input.json',
'question-type-mathExpression-output.txt'),
('question-type-checkbox-input.json', 'question-type-checkbox-output.txt'),
('question-type-mcq-input.json', 'question-type-mcq-output.txt'),
('question-type-singleNumeric-input.json', 'question-type-singleNumeric-output.txt'),
('question-type-singleNumeric-input.json',
'question-type-singleNumeric-output.txt'),
('question-type-reflect-input.json', 'question-type-reflect-output.txt'),
('question-type-mcqReflect-input.json', 'question-type-mcqReflect-output.txt'),
('question-type-mcqReflect-input.json',
'question-type-mcqReflect-output.txt'),
('question-type-unknown-input.json', 'question-type-unknown-output.txt'),
('multiple-questions-input.json', 'multiple-questions-output.txt'),
]
)
def test_quiz_exam_to_markup_converter(input_filename, output_filename):
quiz_json = json.loads(slurp_fixture('json/quiz-to-markup/%s' % input_filename))
expected_output = slurp_fixture('json/quiz-to-markup/%s' % output_filename).strip()
quiz_json = json.loads(slurp_fixture(
'json/quiz-to-markup/%s' % input_filename))
expected_output = slurp_fixture(
'json/quiz-to-markup/%s' % output_filename).strip()
converter = api.QuizExamToMarkupConverter(session=None)
actual_output = converter(quiz_json).strip()
@@ -411,7 +434,8 @@ class TestMarkupToHTMLConverter:
<meta charset="UTF-8"/>
"""
assert self._p(markup) + self.STYLE == output
assert self._p(markup) + self.STYLE_WITH_ALTER == output_with_alter_mjcdn
assert self._p(markup) + \
self.STYLE_WITH_ALTER == output_with_alter_mjcdn
def test_replace_text_tag(self):
markup = """
@@ -438,7 +462,8 @@ class TestMarkupToHTMLConverter:
output = self.markup_to_html(markup)
output_with_alter_mjcdn = self.markup_to_html_with_alter_mjcdn(markup)
assert self._p(result) + self.STYLE == output
assert self._p(result) + self.STYLE_WITH_ALTER == output_with_alter_mjcdn
assert self._p(result) + \
self.STYLE_WITH_ALTER == output_with_alter_mjcdn
def test_replace_heading(self):
output = self.markup_to_html("""
@@ -501,7 +526,8 @@ class TestMarkupToHTMLConverter:
'nodata': Mock(data=None, content_type='image/png')
}
mock_asset_retriever.__call__ = Mock(return_value=None)
mock_asset_retriever.__getitem__ = Mock(side_effect=replies.__getitem__)
mock_asset_retriever.__getitem__ = Mock(
side_effect=replies.__getitem__)
self.markup_to_html._asset_retriever = mock_asset_retriever
output = self.markup_to_html("""
@@ -532,7 +558,8 @@ class TestMarkupToHTMLConverter:
'bWTK9sYwEeW7AxLLCrgDQQ': Mock(data=b'b', content_type='unknown')
}
mock_asset_retriever.__call__ = Mock(return_value=None)
mock_asset_retriever.__getitem__ = Mock(side_effect=replies.__getitem__)
mock_asset_retriever.__getitem__ = Mock(
side_effect=replies.__getitem__)
self.markup_to_html._asset_retriever = mock_asset_retriever
output = self.markup_to_html("""
@@ -570,6 +597,7 @@ def test_quiz_converter():
with open('quiz.html', 'w') as file:
file.write(result)
def test_quiz_converter_all():
pytest.skip()
import os
@@ -583,8 +611,8 @@ def test_quiz_converter_all():
markup_to_html = api.MarkupToHTMLConverter(session=session)
path = 'quiz_json'
for filename in ['quiz-audio.json']: #os.listdir(path):
# for filename in ['all_question_types.json']:
for filename in ['quiz-audio.json']: # os.listdir(path):
# for filename in ['all_question_types.json']:
# if 'YV0W4' not in filename:
# continue
# if 'QVHj1' not in filename:
@@ -600,6 +628,7 @@ def test_quiz_converter_all():
with open('quiz_html/' + filename + '.html', 'w') as f:
f.write(result)
def create_session():
from coursera.coursera_dl import get_session
from coursera.credentials import get_credentials
@@ -625,10 +654,14 @@ def test_asset_retriever(get_reply, get_page):
'vdqUTz61Eea_CQ5dfWSAjQ']
expected_output = [
api.Asset(id="bWTK9sYwEeW7AxLLCrgDQQ", name="M111.mp3", type_name="audio", url="url4", content_type="image/png", data="<...>"),
api.Asset(id="VceKeChKEeaOMw70NkE3iw", name="09_graph_decomposition_problems_1.pdf", type_name="pdf", url="url7", content_type="image/png", data="<...>"),
api.Asset(id="VcmGXShKEea4ehL5RXz3EQ", name="09_graph_decomposition_starter_files_1.zip", type_name="generic", url="url2", content_type="image/png", data="<...>"),
api.Asset(id="vdqUTz61Eea_CQ5dfWSAjQ", name="Capture.PNG", type_name="image", url="url9", content_type="image/png", data="<...>"),
api.Asset(id="bWTK9sYwEeW7AxLLCrgDQQ", name="M111.mp3", type_name="audio",
url="url4", content_type="image/png", data="<...>"),
api.Asset(id="VceKeChKEeaOMw70NkE3iw", name="09_graph_decomposition_problems_1.pdf",
type_name="pdf", url="url7", content_type="image/png", data="<...>"),
api.Asset(id="VcmGXShKEea4ehL5RXz3EQ", name="09_graph_decomposition_starter_files_1.zip",
type_name="generic", url="url2", content_type="image/png", data="<...>"),
api.Asset(id="vdqUTz61Eea_CQ5dfWSAjQ", name="Capture.PNG",
type_name="image", url="url9", content_type="image/png", data="<...>"),
]
retriever = api.AssetRetriever(session=None)