diff --git a/docs/deploying.rst b/docs/deploying.rst
index 6153f04c..7dfe366f 100644
--- a/docs/deploying.rst
+++ b/docs/deploying.rst
@@ -20,7 +20,7 @@ your ``scrapinghub.yml``, you can leave out the parameter completely::
     Packing version 3af023e-master
     Deploying to Scrapy Cloud project "12345"
     {"status": "ok", "project": 12345, "version": "3af023e-master", "spiders": 1}
-    Run your spiders at: https://app.scrapinghub.com/p/12345/
+    Run your spiders at: https://app.zyte.com/p/12345/
 
 You can also deploy your project from a Python egg, or build one without
 deploying::
@@ -29,7 +29,7 @@ deploying::
     Using egg: egg_name
     Deploying to Scrapy Cloud project "12345"
     {"status": "ok", "project": 12345, "version": "1.0.0", "spiders": 1}
-    Run your spiders at: https://app.scrapinghub.com/p/12345/
+    Run your spiders at: https://app.zyte.com/p/12345/
 
 ::
diff --git a/docs/scheduling.rst b/docs/scheduling.rst
index a96f736d..41724d22 100644
--- a/docs/scheduling.rst
+++ b/docs/scheduling.rst
@@ -24,7 +24,7 @@ and ``-s`` options::
     or print items as they are being scraped:
         shub items -f 2/15
     or watch it running in Scrapinghub's web interface:
-        https://app.scrapinghub.com/p/12345/job/2/15
+        https://app.zyte.com/p/12345/job/2/15
 
 ::
@@ -35,7 +35,7 @@ and ``-s`` options::
     or print items as they are being scraped:
         shub items -f 2/15
     or watch it running in Scrapinghub's web interface:
-        https://app.scrapinghub.com/p/33333/job/2/15
+        https://app.zyte.com/p/33333/job/2/15
 
 You can also specify the amount of Scrapy Cloud units (``-u``) and the
 priority (``-p``)::
@@ -46,7 +46,7 @@ You can also specify the amount of Scrapy Cloud units (``-u``) and the priority
     or print items as they are being scraped:
         shub items -f 2/16
     or watch it running in Scrapinghub's web interface:
-        https://app.scrapinghub.com/p/12345/job/2/16
+        https://app.zyte.com/p/12345/job/2/16
 
 shub provides commands to retrieve log entries, scraped items, or requests
 from jobs. If the job is still running, you can provide the ``-f`` (follow) option
diff --git a/shub/config.py b/shub/config.py
index 6bad5003..f4260b9a 100644
--- a/shub/config.py
+++ b/shub/config.py
@@ -28,7 +28,7 @@
 
 class ShubConfig(object):
 
-    DEFAULT_ENDPOINT = 'https://app.scrapinghub.com/api/'
+    DEFAULT_ENDPOINT = 'https://app.zyte.com/api/'
 
     # Dictionary option name: Shortcut to set 'default' key
     SHORTCUTS = {
@@ -57,11 +57,11 @@ def _check_endpoints(self):
             parsed = six.moves.urllib.parse.urlparse(url)
             if parsed.netloc == 'staging.scrapinghub.com':
                 self.endpoints[endpoint] = six.moves.urllib.parse.urlunparse(
-                    parsed._replace(netloc='app.scrapinghub.com')
+                    parsed._replace(netloc='app.zyte.com')
                 )
                 click.echo(
                     'WARNING: Endpoint "%s" is still using %s which has been '
-                    'obsoleted. shub has updated it to app.scrapinghub.com '
+                    'obsoleted. shub has updated it to app.zyte.com '
                     'for this time only. Please update your configuration.' % (
                         endpoint, parsed.netloc,
                     ),
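
Note: the staging-endpoint rewrite that `_check_endpoints` performs above can be exercised in isolation. A minimal sketch, assuming only the stdlib (shub itself goes through `six.moves.urllib.parse`); the `normalize_endpoint` name is illustrative, not a shub function:

    # Sketch of the netloc swap done in ShubConfig._check_endpoints above.
    # `normalize_endpoint` is an illustrative name, not part of shub.
    from urllib.parse import urlparse, urlunparse

    def normalize_endpoint(url):
        parsed = urlparse(url)
        if parsed.netloc == 'staging.scrapinghub.com':
            # _replace() returns a copy of the ParseResult with the netloc
            # swapped; scheme, path, query and fragment pass through untouched.
            return urlunparse(parsed._replace(netloc='app.zyte.com'))
        return url

    assert (normalize_endpoint('https://staging.scrapinghub.com/api/')
            == 'https://app.zyte.com/api/')
    assert (normalize_endpoint('https://app.zyte.com/api/')
            == 'https://app.zyte.com/api/')
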
diff --git a/shub/deploy.py b/shub/deploy.py
index 0e2d0797..757cb4db 100644
--- a/shub/deploy.py
+++ b/shub/deploy.py
@@ -107,7 +107,7 @@ def deploy_cmd(target, version, debug, egg, build_egg, verbose, keep_log,
             version, auth, verbose, keep_log, targetconf.stack,
             targetconf.requirements_file, targetconf.eggs, tmpdir)
         click.echo("Run your spiders at: "
-                   "https://app.scrapinghub.com/p/%s/"
+                   "https://app.zyte.com/p/%s/"
                    "" % targetconf.project_id)
     finally:
         if tmpdir:
@@ -201,6 +201,9 @@ def _add_sources(
         # Keep backward compatibility with pipenv<=2022.8.30
         if isinstance(_requirements, list):
             tmp.write('\n'.join(_requirements).encode('utf-8'))
+        # Keep compatibility with pipenv>=2023.10.24
+        elif isinstance(_requirements, dict):
+            tmp.write('\n'.join(_requirements.values()).encode('utf-8'))
         else:
             with open(_requirements, 'rb') as f:
                 tmp.write(f.read())
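
Note: the new `elif` branch above exists because newer pipenv hands back its generated requirements as a mapping rather than a list. A minimal standalone sketch of the same dispatch, assuming `_requirements` is either a list of requirement lines (pipenv<=2022.8.30), a dict of package name to requirement line (pipenv>=2023.10.24), or a path to a requirements file; `write_requirements` is an illustrative helper, not shub API:

    import tempfile

    def write_requirements(_requirements):
        # Illustrative helper mirroring the branch added in _add_sources above.
        tmp = tempfile.NamedTemporaryFile(suffix='.txt', delete=False)
        if isinstance(_requirements, list):
            # pipenv<=2022.8.30: a plain list of requirement lines
            tmp.write('\n'.join(_requirements).encode('utf-8'))
        elif isinstance(_requirements, dict):
            # pipenv>=2023.10.24: a dict keyed by package name
            tmp.write('\n'.join(_requirements.values()).encode('utf-8'))
        else:
            # otherwise: a path to an existing requirements file
            with open(_requirements, 'rb') as f:
                tmp.write(f.read())
        tmp.close()
        return tmp.name

    # Both pipenv shapes yield identical requirements.txt content:
    old_style = write_requirements(['scrapy>=2.0'])
    new_style = write_requirements({'scrapy': 'scrapy>=2.0'})
    assert open(old_style).read() == open(new_style).read() == 'scrapy>=2.0'
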
diff --git a/shub/items.py b/shub/items.py
index ad656b56..45a50e3f 100644
--- a/shub/items.py
+++ b/shub/items.py
@@ -15,7 +15,7 @@
 
 You can also provide the Scrapinghub job URL instead:
 
-    shub items https://app.scrapinghub.com/p/12345/2/15
+    shub items https://app.zyte.com/p/12345/2/15
 
 You can omit the project ID if you have a default target defined in your
 scrapinghub.yml:
diff --git a/shub/log.py b/shub/log.py
index 569458c8..4a28ef17 100644
--- a/shub/log.py
+++ b/shub/log.py
@@ -17,7 +17,7 @@
 
 You can also provide the Scrapinghub job URL instead:
 
-    shub log https://app.scrapinghub.com/p/12345/2/15
+    shub log https://app.zyte.com/p/12345/2/15
 
 You can omit the project ID if you have a default target defined in your
 scrapinghub.yml:
diff --git a/shub/login.py b/shub/login.py
index d208b21c..9d2093d5 100644
--- a/shub/login.py
+++ b/shub/login.py
@@ -16,7 +16,7 @@
 with your Scrapinghub account.
 
 You can find your API key in Scrapinghub's dashboard:
-https://app.scrapinghub.com/account/apikey
+https://app.zyte.com/account/apikey
 """
 
 SHORT_HELP = "Save your Scrapinghub API key"
@@ -41,7 +41,7 @@ def cli():
 def _get_apikey(suggestion='', endpoint=None):
     suggestion_txt = ' (%s)' % suggestion if suggestion else ''
     click.echo(
-        "Enter your API key from https://app.scrapinghub.com/account/apikey"
+        "Enter your API key from https://app.zyte.com/account/apikey"
     )
     while True:
         key = input('API key%s: ' % suggestion_txt) or suggestion
diff --git a/shub/requests.py b/shub/requests.py
index 45425f45..e922594f 100644
--- a/shub/requests.py
+++ b/shub/requests.py
@@ -15,7 +15,7 @@
 
 You can also provide the Scrapinghub job URL instead:
 
-    shub requests https://app.scrapinghub.com/p/12345/2/15
+    shub requests https://app.zyte.com/p/12345/2/15
 
 You can omit the project ID if you have a default target defined in your
 scrapinghub.yml:
diff --git a/shub/utils.py b/shub/utils.py
index f3d4168d..ae25fd6e 100644
--- a/shub/utils.py
+++ b/shub/utils.py
@@ -397,7 +397,7 @@ def _deploy_dependency_egg(project, endpoint, apikey, name=None, version=None, e
         files = {'egg': (egg_name, egg_fp)}
         make_deploy_request(url, data, files, auth, False, False)
 
-    success = "Deployed eggs list at: https://app.scrapinghub.com/p/%s/deploy/"
+    success = "Deployed eggs list at: https://app.zyte.com/p/%s/deploy/"
     click.echo(success % project)
diff --git a/tests/image/test_deploy.py b/tests/image/test_deploy.py
index d7bf39dd..6f9f186e 100644
--- a/tests/image/test_deploy.py
+++ b/tests/image/test_deploy.py
@@ -45,7 +45,7 @@ def test_cli(list_mocked, post_mocked, get_mocked):
 
     auth_cfg = '{"email": null, "password": " ", "username": "abcdef"}'
     post_mocked.assert_called_with(
-        'https://app.scrapinghub.com/api/releases/deploy.json',
+        'https://app.zyte.com/api/releases/deploy.json',
         allow_redirects=False,
         auth=('abcdef', ''),
         data={
@@ -78,7 +78,7 @@ def test_cli_insecure_registry(list_mocked, post_mocked, get_mocked):
     assert result.exit_code == 0
 
     post_mocked.assert_called_with(
-        'https://app.scrapinghub.com/api/releases/deploy.json',
+        'https://app.zyte.com/api/releases/deploy.json',
         allow_redirects=False,
         auth=('abcdef', ''),
         data={
diff --git a/tests/image/test_list.py b/tests/image/test_list.py
index 273b9abf..1d455a55 100644
--- a/tests/image/test_list.py
+++ b/tests/image/test_list.py
@@ -62,7 +62,7 @@ def test_cli(requests_get_mock, get_docker_client_mock, is_binary_logs):
     assert result.exit_code == 0
     assert result.output.endswith('abc\ndef\n')
     requests_get_mock.assert_called_with(
-        'https://app.scrapinghub.com/api/settings/get.json',
+        'https://app.zyte.com/api/settings/get.json',
         allow_redirects=False, auth=('abcdef', ''),
         params={'project': 12345}, timeout=300)
diff --git a/tests/test_config.py b/tests/test_config.py
index c4c78d7f..f39a4038 100644
--- a/tests/test_config.py
+++ b/tests/test_config.py
@@ -254,7 +254,7 @@ def _get_conf(scrapycfg_default_target):
 
         [deploy:otherurl]
         project = 444
-        url = http://app.scrapinghub.com/api/scrapyd/
+        url = http://app.zyte.com/api/scrapyd/
 
         [deploy:external]
         project = 555
@@ -283,7 +283,7 @@ def _get_conf(scrapycfg_default_target):
     expected_endpoints = {
         'default': ShubConfig.DEFAULT_ENDPOINT,
         'external': 'external_endpoint',
-        'otherurl': 'http://app.scrapinghub.com/api/'
+        'otherurl': 'http://app.zyte.com/api/'
     }
     expected_apikeys = {
         'otheruser': 'otherkey',
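
Note: the `otherurl` expectation above captures how a scrapy.cfg deploy URL is reduced to a shub API endpoint. A rough sketch of that reduction, assuming the rule is simply to drop a trailing `scrapyd/` path segment (the helper name is illustrative; shub's real conversion lives in shub/config.py):

    def endpoint_from_scrapycfg_url(url):
        # Illustrative only: strip the scrapyd-specific path suffix so that
        # 'http://app.zyte.com/api/scrapyd/' becomes 'http://app.zyte.com/api/'.
        suffix = 'scrapyd/'
        if url.endswith(suffix):
            return url[:-len(suffix)]
        return url

    assert (endpoint_from_scrapycfg_url('http://app.zyte.com/api/scrapyd/')
            == 'http://app.zyte.com/api/')
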
diff --git a/tests/test_deploy_reqs.py b/tests/test_deploy_reqs.py
index 9f044a13..33b60519 100644
--- a/tests/test_deploy_reqs.py
+++ b/tests/test_deploy_reqs.py
@@ -33,7 +33,7 @@ def test_can_decompress_downloaded_packages_and_call_deploy_reqs(self):
         for args, kwargs in m.call_args_list:
             project, endpoint, apikey = args
             self.assertEqual(project, 1)
-            self.assertIn('https://app.scrapinghub.com', endpoint)
+            self.assertIn('https://app.zyte.com', endpoint)
             self.assertEqual(apikey, self.conf.apikeys['default'])
 
     def _write_tmp_requirements_file(self):
diff --git a/tests/test_utils.py b/tests/test_utils.py
index a9abee8e..a71f7512 100755
--- a/tests/test_utils.py
+++ b/tests/test_utils.py
@@ -132,12 +132,12 @@ def _test_specs(job, expected_job_id, expected_endpoint):
         _test_specs('prod/2/3', '2/2/3', 'default')
         _test_specs('vagrant/2/3', '3/2/3', 'vagrant')
         _test_specs(
-            'https://app.scrapinghub.com/p/7389/259/1/#/log/line/0',
+            'https://app.zyte.com/p/7389/259/1/#/log/line/0',
            '7389/259/1',
             'default',
         )
         _test_specs(
-            'https://app.scrapinghub.com/p/7389/job/259/1/',
+            'https://app.zyte.com/p/7389/job/259/1/',
             '7389/259/1',
             'default',
         )
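
Note: the two job URL forms exercised above differ only in the optional `job/` segment and the trailing fragment. A minimal sketch of a parser that accepts both, written against just these test cases (illustrative; shub's real job-spec handling in shub/utils.py also covers targets like `prod/2/3`):

    import re

    # Matches /p/<project>/<spider>/<job>, with an optional 'job/' in between.
    JOB_URL_RE = re.compile(
        r'/p/(?P<project>\d+)/(?:job/)?(?P<spider>\d+)/(?P<job>\d+)'
    )

    def job_id_from_url(url):
        match = JOB_URL_RE.search(url)
        if match is None:
            raise ValueError('not a Scrapy Cloud job URL: %s' % url)
        return '{project}/{spider}/{job}'.format(**match.groupdict())

    assert job_id_from_url(
        'https://app.zyte.com/p/7389/259/1/#/log/line/0') == '7389/259/1'
    assert job_id_from_url(
        'https://app.zyte.com/p/7389/job/259/1/') == '7389/259/1'
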