id | file_name | file_path | content | size | language | extension | total_lines | avg_line_length | max_line_length | alphanum_fraction | repo_name | repo_stars | repo_forks | repo_open_issues | repo_license | repo_extraction_date |
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
500 | isbndb.py | internetarchive_openlibrary/scripts/providers/isbndb.py | import re
import json
import logging
import os
from typing import Any, Final
import requests
from json import JSONDecodeError
from openlibrary.config import load_config
from openlibrary.core.imports import Batch
from openlibrary.plugins.upstream.utils import get_marc21_language
from scripts.partner_batch_imports import is_published_in_future_year
from scripts.solr_builder.solr_builder.fn_to_cli import FnToCLI
logger = logging.getLogger("openlibrary.importer.isbndb")
SCHEMA_URL = (
"https://raw.githubusercontent.com/internetarchive"
"/openlibrary-client/master/olclient/schemata/import.schema.json"
)
NONBOOK: Final = ['dvd', 'dvd-rom', 'cd', 'cd-rom', 'cassette', 'sheet music', 'audio']
RE_YEAR = re.compile(r'(\d{4})')
def is_nonbook(binding: str, nonbooks: list[str]) -> bool:
"""
Determine whether binding, or any word of binding when split on " ", is
contained within nonbooks.
"""
words = binding.split(" ")
return any(word.casefold() in nonbooks for word in words)
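# Illustrative sketch (not part of the original script), mirroring cases from
# scripts/tests/test_isbndb.py::test_is_nonbook:
#   is_nonbook("audio cassette", NONBOOK)  # -> True ('audio' and 'cassette' match)
#   is_nonbook("paperback", NONBOOK)       # -> False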
class ISBNdb:
ACTIVE_FIELDS = [
'authors',
'isbn_13',
'languages',
'number_of_pages',
'publish_date',
'publishers',
'source_records',
'subjects',
'title',
]
INACTIVE_FIELDS = [
"copyright",
"dewey",
"doi",
"height",
"issn",
"lccn",
"length",
"width",
'lc_classifications',
'pagination',
'weight',
]
REQUIRED_FIELDS = requests.get(SCHEMA_URL).json()['required']
def __init__(self, data: dict[str, Any]):
self.isbn_13 = [data.get('isbn13')]
self.source_id = f'idb:{self.isbn_13[0]}'
self.title = data.get('title')
self.publish_date = self._get_year(data) # 'YYYY'
self.publishers = self._get_list_if_present(data.get('publisher'))
self.authors = self.contributors(data)
self.number_of_pages = data.get('pages')
self.languages = self._get_languages(data)
self.source_records = [self.source_id]
self.subjects = [
subject.capitalize() for subject in (data.get('subjects') or []) if subject
]
self.binding = data.get('binding', '')
# Assert importable
for field in self.REQUIRED_FIELDS + ['isbn_13']:
assert getattr(self, field), field
assert is_nonbook(self.binding, NONBOOK) is False, "is_nonbook() returned True"
assert self.isbn_13 != [
"9780000000002"
], f"known bad ISBN: {self.isbn_13}" # TODO: this should do more than ignore one known-bad ISBN.
def _get_languages(self, data: dict[str, Any]) -> list[str] | None:
"""Extract a list of MARC 21 format languages from an ISBNDb JSONL line."""
language_line = data.get('language')
if not language_line:
return None
possible_languages = re.split(',| |;', language_line)
unique_languages = []
for language in possible_languages:
if (
marc21_language := get_marc21_language(language)
) and marc21_language not in unique_languages:
unique_languages.append(marc21_language)
return unique_languages or None
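# Illustrative sketch (not part of the original script), mirroring cases from
# scripts/tests/test_isbndb.py::test_isbndb_get_languages:
#   _get_languages({'language': 'en_US'})          # -> ['eng']
#   _get_languages({'language': 'es,Eng'})         # -> ['spa', 'eng']
#   _get_languages({'language': 'not a language'}) # -> None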
def _get_list_if_present(self, item: str | None) -> list[str] | None:
"""Return items as a list, or None."""
return [item] if item else None
def _get_year(self, data: dict[str, Any]) -> str | None:
"""Return a year str/int as a four digit string, or None."""
result = ""
if publish_date := data.get('date_published'):
if isinstance(publish_date, str):
m = RE_YEAR.search(publish_date)
result = m.group(1) if m else None # type: ignore[assignment]
else:
result = str(publish_date)[:4]
return result or None
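# Illustrative sketch (not part of the original script), mirroring cases from
# scripts/tests/test_isbndb.py::test_isbndb_get_year:
#   _get_year({'date_published': 2015})            # -> '2015'
#   _get_year({'date_published': 'December 2000'}) # -> '2000'
#   _get_year({'date_published': '-'})             # -> None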
def _get_subjects(self, data: dict[str, Any]) -> list[str] | None:
"""Return a list of subjects None."""
subjects = [
subject.capitalize() for subject in data.get('subjects', '') if subject
]
return subjects or None
@staticmethod
def contributors(data: dict[str, Any]) -> list[dict[str, Any]] | None:
"""Return a list of author-dicts or None."""
def make_author(name):
author = {'name': name}
return author
if contributors := data.get('authors'):
# form list of author dicts
authors = [make_author(c) for c in contributors if c[0]]
return authors
return None
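# Illustrative sketch (not part of the original script): given ISBNdb author
# data such as {'authors': ['Orvig', 'Glen Martin']}, contributors() returns
# [{'name': 'Orvig'}, {'name': 'Glen Martin'}]. Note that `c[0]` raises
# IndexError on an empty author string; batch_import() below catches that.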
def json(self):
"""Return a JSON representation of the object."""
return {
field: getattr(self, field)
for field in self.ACTIVE_FIELDS
if getattr(self, field)
}
def load_state(path: str, logfile: str) -> tuple[list[str], int]:
"""Retrieves starting point from logfile, if log exists
Takes as input a path which expands to an ordered candidate list
of isbndb* filenames to process, the location of the
logfile, and determines which of those files are remaining, as
well as what our offset is in that file.
e.g. if we request path containing f1, f2, f3 and our log
says f2,100 then we start our processing at f2 at the 100th line.
This assumes the script is being called w/ e.g.:
/1/var/tmp/imports/2021-08/Bibliographic/*/
"""
filenames = sorted(
os.path.join(path, f) for f in os.listdir(path) if f.startswith("isbndb")
)
try:
with open(logfile) as fin:
active_fname, offset = next(fin).strip().split(',')
unfinished_filenames = filenames[filenames.index(active_fname) :]
return unfinished_filenames, int(offset)
except (ValueError, OSError):
return filenames, 0
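# Illustrative sketch (not part of the original script), with hypothetical
# filenames: if `path` contains isbndb_1.jsonl, isbndb_2.jsonl, isbndb_3.jsonl
# and the logfile's single line reads "<path>/isbndb_2.jsonl,100", load_state()
# returns ([".../isbndb_2.jsonl", ".../isbndb_3.jsonl"], 100); if the logfile
# is missing or malformed, it returns (all filenames, 0).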
def get_line(line: bytes) -> dict | None:
"""converts a line to a book item"""
json_object = None
try:
json_object = json.loads(line)
except JSONDecodeError as e:
logger.info(f"json decoding failed for: {line!r}: {e!r}")
return json_object
def get_line_as_biblio(line: bytes, status: str) -> dict | None:
if json_object := get_line(line):
b = ISBNdb(json_object)
return {'ia_id': b.source_id, 'status': status, 'data': b.json()}
return None
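# Illustrative sketch (not part of the original script): for a JSONL line that
# passes ISBNdb's required-field checks, e.g. one with isbn13 "9780000001566",
# get_line_as_biblio(line, "staged") returns a dict shaped like
#   {'ia_id': 'idb:9780000001566', 'status': 'staged', 'data': {...}}
# where 'data' holds the ACTIVE_FIELDS serialized by ISBNdb.json().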
def update_state(logfile: str, fname: str, line_num: int = 0) -> None:
"""Records the last file we began processing and the current line"""
with open(logfile, 'w') as fout:
fout.write(f'{fname},{line_num}\n')
# TODO: It's possible `batch_import()` could be modified to take a parsing function
# and a filter function instead of hardcoding in `csv_to_ol_json_item()` and some filters.
def batch_import(path: str, batch: Batch, import_status: str, batch_size: int = 5000):
logfile = os.path.join(path, 'import.log')
filenames, offset = load_state(path, logfile)
for fname in filenames:
book_items = []
with open(fname, 'rb') as f:
logger.info(f"Processing: {fname} from line {offset}")
for line_num, line in enumerate(f):
# skip over already processed records
if offset:
if offset > line_num:
continue
offset = 0
try:
book_item = get_line_as_biblio(line=line, status=import_status)
assert book_item is not None
if not any(
[
"independently published"
in book_item['data'].get('publishers', ''),
is_published_in_future_year(book_item["data"]),
]
):
book_items.append(book_item)
except (AssertionError, IndexError) as e:
logger.info(f"Error: {e!r} from {line!r}")
# If we have enough items, submit a batch
if not ((line_num + 1) % batch_size):
batch.add_items(book_items)
update_state(logfile, fname, line_num)
book_items = [] # clear added items
# Add any remaining book_items to batch
if book_items:
batch.add_items(book_items)
update_state(logfile, fname, line_num)
def main(ol_config: str, batch_path: str, import_status: str = 'staged') -> None:
load_config(ol_config)
batch_name = "isbndb_bulk_import"
batch = Batch.find(batch_name) or Batch.new(batch_name)
batch_import(path=batch_path, batch=batch, import_status=import_status)
if __name__ == '__main__':
FnToCLI(main).run()
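# Example invocation (a sketch; the exact flags depend on how FnToCLI maps
# main()'s signature to CLI arguments, and both paths are hypothetical):
#   python scripts/providers/isbndb.py /olsystem/etc/openlibrary.yml \
#       /1/var/tmp/imports/2021-08/Bibliographic/ --import-status staged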
| 8,832 | Python | .py | 206 | 33.621359 | 105 | 0.600723 | internetarchive/openlibrary | 5,078 | 1,311 | 956 | AGPL-3.0 | 9/5/2024, 5:07:13 PM (Europe/Amsterdam) |
501 | test_isbndb.py | internetarchive_openlibrary/scripts/tests/test_isbndb.py | from pathlib import Path
import pytest
from ..providers.isbndb import ISBNdb, get_line, NONBOOK, is_nonbook
# Sample lines from the dump
line0 = '''{"isbn": "0000001562", "msrp": "0.00", "image": "Https://images.isbndb.com/covers/15/66/9780000001566.jpg", "title": "教えます!花嫁衣装 のトレンドニュース", "isbn13": "9780000001566", "authors": ["Orvig", "Glen Martin", "Ron Jenson"], "binding": "Mass Market Paperback", "edition": "1", "language": "en", "subjects": ["PQ", "878"], "synopsis": "Francesco Petrarca.", "publisher": "株式会社オールアバウト", "dimensions": "97 p.", "title_long": "教えます!花嫁衣装のトレンドニュース", "date_published": 2015}''' # noqa: E501
line1 = '''{"isbn": "0000002259", "msrp": "0.00", "title": "確定申告、住宅ローン控除とは?", "isbn13": "9780000002259", "authors": ["田中 卓也 ~autofilled~"], "language": "en", "publisher": "株式会社オールアバウト", "title_long": "確定申告、住宅ローン控除とは?"}''' # noqa: E501
line2 = '''{"isbn": "0000000108", "msrp": "1.99", "image": "Https://images.isbndb.com/covers/01/01/9780000000101.jpg", "pages": 8, "title": "Nga Aboriginal Art Cal 2000", "isbn13": "9780000000101", "authors": ["Nelson, Bob, Ph.D."], "binding": "Hardcover", "edition": "1", "language": "en", "subjects": ["Mushroom culture", "Edible mushrooms"], "publisher": "Nelson Motivation Inc.", "dimensions": "Height: 6.49605 Inches, Length: 0.03937 Inches, Weight: 0.1763698096 Pounds, Width: 6.49605 Inches", "title_long": "Nga Aboriginal Art Cal 2000", "date_published": "2002"}''' # noqa: E501
# The sample lines from above, marshalled into Python dictionaries
line0_unmarshalled = {
'isbn': '0000001562',
'msrp': '0.00',
'image': 'Https://images.isbndb.com/covers/15/66/9780000001566.jpg',
'title': '教えます!花嫁衣装 のトレンドニュース',
'isbn13': '9780000001566',
'authors': ['Orvig', 'Glen Martin', 'Ron Jenson'],
'binding': 'Mass Market Paperback',
'edition': '1',
'language': 'en',
'subjects': ['PQ', '878'],
'synopsis': 'Francesco Petrarca.',
'publisher': '株式会社オールアバウト',
'dimensions': '97 p.',
'title_long': '教えます!花嫁衣装のトレンドニュース',
'date_published': 2015,
}
line1_unmarshalled = {
'isbn': '0000002259',
'msrp': '0.00',
'title': '確定申告、住宅ローン控除とは?',
'isbn13': '9780000002259',
'authors': ['田中 卓也 ~autofilled~'],
'language': 'en',
'publisher': '株式会社オールアバウト',
'title_long': '確定申告、住宅ローン控除とは?',
}
line2_unmarshalled = {
'isbn': '0000000108',
'msrp': '1.99',
'image': 'Https://images.isbndb.com/covers/01/01/9780000000101.jpg',
'pages': 8,
'title': 'Nga Aboriginal Art Cal 2000',
'isbn13': '9780000000101',
'authors': ['Nelson, Bob, Ph.D.'],
'binding': 'Hardcover',
'edition': '1',
'language': 'en',
'subjects': ['Mushroom culture', 'Edible mushrooms'],
'publisher': 'Nelson Motivation Inc.',
'dimensions': 'Height: 6.49605 Inches, Length: 0.03937 Inches, Weight: 0.1763698096 Pounds, Width: 6.49605 Inches',
'title_long': 'Nga Aboriginal Art Cal 2000',
'date_published': '2002',
}
sample_lines = [line0, line1, line2]
sample_lines_unmarshalled = [line0_unmarshalled, line1_unmarshalled, line2_unmarshalled]
@pytest.fixture
def get_isbndb_data():
"""
Get a data dictionary, in ISBNdb JSONL format, suitable for passing to the ISBNdb class.
This has the minimally necessary fields from import.schema.json.
"""
return {
'title': 'test title',
'date_published': '2000',
'publisher': 'test publisher',
'authors': 'Test Author',
}
def test_isbndb_to_ol_item(tmp_path):
# Set up a three-line file to read.
isbndb_file: Path = tmp_path / "isbndb.jsonl"
data = '\n'.join(sample_lines)
isbndb_file.write_text(data)
with open(isbndb_file, 'rb') as f:
for line_num, line in enumerate(f):
assert get_line(line) == sample_lines_unmarshalled[line_num]
@pytest.mark.parametrize(
'binding, expected',
[
("DVD", True),
("dvd", True),
("audio cassette", True),
("audio", True),
("cassette", True),
("paperback", False),
],
)
def test_is_nonbook(binding, expected) -> None:
"""
Just ensure basic functionality works in terms of matching strings
and substrings, case insensitivity, etc.
"""
assert is_nonbook(binding, NONBOOK) == expected
@pytest.mark.parametrize(
'language, expected',
[
('en_US', ['eng']),
('es,Eng', ['spa', 'eng']),
('afrikaans afr af en', ['afr', 'eng']),
('not a language', None),
('', None),
(None, None),
],
)
def test_isbndb_get_languages(language, expected, get_isbndb_data):
isbndb_line = get_isbndb_data
isbndb_line['language'] = language
item = ISBNdb(isbndb_line)
assert item._get_languages(isbndb_line) == expected
@pytest.mark.parametrize(
'year, expected',
[
(2000, "2000"),
("2000", "2000"),
("December 2000", "2000"),
("-", None),
("123", None),
(None, None),
],
)
def test_isbndb_get_year(year, expected, get_isbndb_data):
isbndb_line = get_isbndb_data
item = ISBNdb(isbndb_line)
# Do this 'out of order' to instantiate the class with a valid date; the
# only purpose here is to unit test _get_year(). Validation is handled in
# the class elsewhere.
isbndb_line['date_published'] = year
assert item._get_year(isbndb_line) == expected
| 5,763 | Python | .py | 126 | 37.777778 | 587 | 0.632711 | internetarchive/openlibrary | 5,078 | 1,311 | 956 | AGPL-3.0 | 9/5/2024, 5:07:13 PM (Europe/Amsterdam) |
502 | test_affiliate_server.py | internetarchive_openlibrary/scripts/tests/test_affiliate_server.py | """
Requires pytest-mock to be installed: `pip install pytest-mock`
for access to the mocker fixture.
# docker compose run --rm home pytest scripts/tests/test_affiliate_server.py
"""
import json
import sys
from typing import Any
from unittest.mock import MagicMock
import pytest
# TODO: Can we remove _init_path someday :(
sys.modules['_init_path'] = MagicMock()
from openlibrary.mocks.mock_infobase import mock_site # noqa: F401
from scripts.affiliate_server import ( # noqa: E402
PrioritizedIdentifier,
Priority,
Submit,
get_isbns_from_book,
get_isbns_from_books,
get_editions_for_books,
get_pending_books,
make_cache_key,
process_google_book,
)
ol_editions = {
f"123456789{i}": {
"type": "/type/edition",
"key": f"/books/OL{i}M",
"isbn_10": [f"123456789{i}"],
"isbn_13": [f"123456789012{i}"],
"covers": [int(f"1234567{i}")],
"title": f"Book {i}",
"authors": [{"key": f"/authors/OL{i}A"}],
"publishers": [f"Publisher {i}"],
"publish_date": f"Aug 0{i}, 2023",
"number_of_pages": int(f"{i}00"),
}
for i in range(8)
}
ol_editions["1234567891"].pop("covers")
ol_editions["1234567892"].pop("title")
ol_editions["1234567893"].pop("authors")
ol_editions["1234567894"].pop("publishers")
ol_editions["1234567895"].pop("publish_date")
ol_editions["1234567896"].pop("number_of_pages")
amz_books = {
f"123456789{i}": {
"isbn_10": [f"123456789{i}"],
"isbn_13": [f"12345678901{i}"],
"cover": [int(f"1234567{i}")],
"title": f"Book {i}",
"authors": [{'name': f"Last_{i}a, First"}, {'name': f"Last_{i}b, First"}],
"publishers": [f"Publisher {i}"],
"publish_date": f"Aug 0{i}, 2023",
"number_of_pages": int(f"{i}00"),
}
for i in range(8)
}
def test_ol_editions_and_amz_books():
assert len(ol_editions) == len(amz_books) == 8
def test_get_editions_for_books(mock_site): # noqa: F811
"""
Attempting to save many ol editions and then get them back...
"""
start = len(mock_site.docs)
mock_site.save_many(ol_editions.values())
assert len(mock_site.docs) - start == len(ol_editions)
editions = get_editions_for_books(amz_books.values())
assert len(editions) == len(ol_editions)
assert sorted(edition.key for edition in editions) == [
f"/books/OL{i}M" for i in range(8)
]
def test_get_pending_books(mock_site): # noqa: F811
"""
Testing get_pending_books() with no ol editions saved and then with ol editions.
"""
# All books will be pending if they have no corresponding ol editions
assert len(get_pending_books(amz_books.values())) == len(amz_books)
# Save corresponding ol editions into the mock site
start = len(mock_site.docs)
mock_site.save_many(ol_editions.values()) # Save the ol editions
assert len(mock_site.docs) - start == len(ol_editions)
books = get_pending_books(amz_books.values())
assert len(books) == 6 # Only 6 books are missing covers, titles, authors, etc.
def test_get_isbns_from_book():
"""
Testing get_isbns_from_book() with a book that has both isbn_10 and isbn_13.
"""
book = {
"isbn_10": ["1234567890"],
"isbn_13": ["1234567890123"],
}
assert get_isbns_from_book(book) == ["1234567890", "1234567890123"]
def test_get_isbns_from_books():
"""
Testing get_isbns_from_books() with a list of books that have both isbn_10 and isbn_13.
"""
books = [
{
"isbn_10": ["1234567890"],
"isbn_13": ["1234567890123"],
},
{
"isbn_10": ["1234567891"],
"isbn_13": ["1234567890124"],
},
]
assert get_isbns_from_books(books) == [
'1234567890',
'1234567890123',
'1234567890124',
'1234567891',
]
def test_prioritized_identifier_equality_set_uniqueness() -> None:
"""
`PrioritizedIdentifier` is unique in a set when no other class instance
in the set has the same identifier.
"""
identifier_1 = PrioritizedIdentifier(identifier="1111111111")
identifier_2 = PrioritizedIdentifier(identifier="2222222222")
set_one = set()
set_one.update([identifier_1, identifier_1])
assert len(set_one) == 1
set_two = set()
set_two.update([identifier_1, identifier_2])
assert len(set_two) == 2
def test_prioritized_identifier_serialize_to_json() -> None:
"""
`PrioritizedIdentifier` needs to be serializable to JSON because it is
sometimes passed to, e.g., `json.dumps()`.
"""
p_identifier = PrioritizedIdentifier(
identifier="1111111111", priority=Priority.HIGH
)
dumped_identifier = json.dumps(p_identifier.to_dict())
dict_identifier = json.loads(dumped_identifier)
assert dict_identifier["priority"] == "HIGH"
assert isinstance(dict_identifier["timestamp"], str)
@pytest.mark.parametrize(
["isbn_or_asin", "expected_key"],
[
({"isbn_10": [], "isbn_13": ["9780747532699"]}, "9780747532699"), # Use 13.
(
{"isbn_10": ["0747532699"], "source_records": ["amazon:B06XYHVXVJ"]},
"9780747532699",
), # 10 -> 13.
(
{"isbn_10": [], "isbn_13": [], "source_records": ["amazon:B06XYHVXVJ"]},
"B06XYHVXVJ",
), # Get non-ISBN 10 ASIN from `source_records` if necessary.
({"isbn_10": [], "isbn_13": [], "source_records": []}, ""), # Nothing to use.
({}, ""), # Nothing to use.
],
)
def test_make_cache_key(isbn_or_asin: dict[str, Any], expected_key: str) -> None:
got = make_cache_key(isbn_or_asin)
assert got == expected_key
# Sample Google Book data with all fields present
complete_book_data = {
"kind": "books#volumes",
"totalItems": 1,
"items": [
{
"kind": "books#volume",
"id": "YJ1uQwAACAAJ",
"etag": "a6JFgm2Cyu0",
"selfLink": "https://www.googleapis.com/books/v1/volumes/YJ1uQwAACAAJ",
"volumeInfo": {
"title": "Бал моей мечты",
"subtitle": "[для сред. шк. возраста]",
"authors": ["Светлана Лубенец"],
"publishedDate": "2009",
"industryIdentifiers": [
{"type": "ISBN_10", "identifier": "5699350136"},
{"type": "ISBN_13", "identifier": "9785699350131"},
],
"pageCount": 153,
"publisher": "Some Publisher",
"description": "A cool book",
},
"saleInfo": {
"country": "US",
"saleability": "NOT_FOR_SALE",
"isEbook": False,
},
"accessInfo": {
"country": "US",
"viewability": "NO_PAGES",
},
}
],
}
# Expected output for the complete book data
expected_output_complete = {
"isbn_10": ["5699350136"],
"isbn_13": ["9785699350131"],
"title": "Бал моей мечты",
"subtitle": "[для сред. шк. возраста]",
"authors": [{"name": "Светлана Лубенец"}],
"source_records": ["google_books:9785699350131"],
"publishers": ["Some Publisher"],
"publish_date": "2009",
"number_of_pages": 153,
"description": "A cool book",
}
# Parametrized tests for different missing fields
@pytest.mark.parametrize(
"input_data, expected_output",
[
(complete_book_data, expected_output_complete),
# Missing ISBN_13
(
{
"kind": "books#volumes",
"totalItems": 1,
"items": [
{
"volumeInfo": {
"title": "Бал моей мечты",
"authors": ["Светлана Лубенец"],
"publishedDate": "2009",
"industryIdentifiers": [
{"type": "ISBN_10", "identifier": "5699350136"}
],
"pageCount": 153,
"publisher": "Some Publisher",
}
}
],
},
{
"isbn_10": ["5699350136"],
"isbn_13": [],
"title": "Бал моей мечты",
"subtitle": None,
"authors": [{"name": "Светлана Лубенец"}],
"source_records": ["google_books:5699350136"],
"publishers": ["Some Publisher"],
"publish_date": "2009",
"number_of_pages": 153,
"description": None,
},
),
# Missing authors
(
{
"kind": "books#volumes",
"totalItems": 1,
"items": [
{
"volumeInfo": {
"title": "Бал моей мечты",
"publishedDate": "2009",
"industryIdentifiers": [
{"type": "ISBN_10", "identifier": "5699350136"},
{"type": "ISBN_13", "identifier": "9785699350131"},
],
"pageCount": 153,
"publisher": "Some Publisher",
}
}
],
},
{
"isbn_10": ["5699350136"],
"isbn_13": ["9785699350131"],
"title": "Бал моей мечты",
"subtitle": None,
"authors": [],
"source_records": ["google_books:9785699350131"],
"publishers": ["Some Publisher"],
"publish_date": "2009",
"number_of_pages": 153,
"description": None,
},
),
# Missing everything but the title and ISBN 13.
(
{
"kind": "books#volumes",
"totalItems": 1,
"items": [
{
"volumeInfo": {
"title": "Бал моей мечты",
"industryIdentifiers": [
{"type": "ISBN_13", "identifier": "9785699350131"}
],
}
}
],
},
{
"isbn_10": [],
"isbn_13": ["9785699350131"],
"title": "Бал моей мечты",
"subtitle": None,
"authors": [],
"source_records": ["google_books:9785699350131"],
"publishers": [],
"publish_date": "",
"number_of_pages": None,
"description": None,
},
),
],
)
def test_process_google_book(input_data, expected_output):
"""
Test a few permutations to make sure the function can handle missing fields.
It is assumed there will always be an ISBN 10 or 13 as that is what this queries
by. If both are absent this will crash.
"""
assert process_google_book(input_data) == expected_output
def test_process_google_book_no_items():
"""Sometimes there will be no results from Google Books."""
input_data = {"kind": "books#volumes", "totalItems": 0, "items": []}
assert process_google_book(input_data) is None
def test_process_google_book_multiple_items():
"""We should only get one result per ISBN."""
input_data = {
"kind": "books#volumes",
"totalItems": 2,
"items": [
{"volumeInfo": {"title": "Book One"}},
{"volumeInfo": {"title": "Book Two"}},
],
}
assert process_google_book(input_data) is None
| 12,095 | Python | .py | 329 | 25.924012 | 91 | 0.519539 | internetarchive/openlibrary | 5,078 | 1,311 | 956 | AGPL-3.0 | 9/5/2024, 5:07:13 PM (Europe/Amsterdam) |
503 | test_import_open_textbook_library.py | internetarchive_openlibrary/scripts/tests/test_import_open_textbook_library.py | import pytest
from ..import_open_textbook_library import map_data
@pytest.mark.parametrize(
"input_data, expected_output",
[
(
# Test case 1: Basic case with all fields present
{
"id": 1238,
"title": "Healthcare in the United States: Navigating the Basics of a Complex System",
"edition_statement": None,
"volume": None,
"copyright_year": 2022,
"ISBN10": None,
"ISBN13": "9781940771915",
"license": "Attribution-ShareAlike",
"language": "eng",
"description": "This book is a collaborative effort among three faculty members from the Darton College",
"contributors": [
{
"id": 5889,
"contribution": "Author",
"primary": False,
"corporate": False,
"title": "Dr.",
"first_name": "Deanna",
"middle_name": "L.",
"last_name": "Howe",
"location": "Albany, NY",
},
{
"id": 5890,
"contribution": "Author",
"primary": False,
"corporate": False,
"title": "Dr.",
"first_name": "Andrea",
"middle_name": "L.",
"last_name": "Dozier",
"location": "Albany, NY",
},
{
"id": 5891,
"contribution": None,
"primary": False,
"corporate": False,
"title": "Dr.",
"first_name": "Sheree",
"middle_name": "O.",
"last_name": "Dickenson",
"location": "Albany, NY",
},
],
"subjects": [
{
"id": 17,
"name": "Medicine",
"parent_subject_id": None,
"call_number": "RA440",
"visible_textbooks_count": 74,
"url": "https://open.umn.edu/opentextbooks/subjects/medicine",
}
],
"publishers": [
{
"id": 1217,
"name": "University of North Georgia Press",
"url": "https://ung.edu/university-press/",
"year": None,
"created_at": "2022-08-25T14:37:55.000Z",
"updated_at": "2022-08-25T14:37:55.000Z",
}
],
},
{
"identifiers": {"open_textbook_library": ["1238"]},
"source_records": ["open_textbook_library:1238"],
"title": "Healthcare in the United States: Navigating the Basics of a Complex System",
"isbn_13": ["9781940771915"],
"languages": ["eng"],
"description": "This book is a collaborative effort among three faculty members from the Darton College",
"subjects": ["Medicine"],
"publishers": ["University of North Georgia Press"],
"publish_date": "2022",
"authors": [
{"name": "Deanna L. Howe"},
{"name": "Andrea L. Dozier"},
],
"contributors": [{'role': None, 'name': 'Sheree O. Dickenson'}],
"lc_classifications": ["RA440"],
},
),
# Test case 2: Missing some optional fields
(
{
"id": 895,
"title": "The ELC: An Early Childhood Learning Community at Work",
"language": "eng",
"description": "The ELC professional development model was designed to improve the quality of teacher candidates",
"contributors": [
{
"id": 5247,
"contribution": "Author",
"primary": False,
"corporate": False,
"title": None,
"first_name": "Heather",
"middle_name": None,
"last_name": "Bridge",
"location": None,
"background_text": "Heather Bridge",
},
{
"id": 5248,
"contribution": "Author",
"primary": False,
"corporate": False,
"title": None,
"first_name": "Lorraine",
"middle_name": None,
"last_name": "Melita",
"location": None,
"background_text": "Lorraine Melita",
},
{
"id": 5249,
"contribution": "Author",
"primary": False,
"corporate": False,
"title": None,
"first_name": "Patricia",
"middle_name": None,
"last_name": "Roiger",
"location": None,
"background_text": "Patricia Roiger",
},
],
"subjects": [
{
"id": 57,
"name": "Early Childhood",
"parent_subject_id": 5,
"call_number": "LB1139.2",
"visible_textbooks_count": 11,
"url": "https://open.umn.edu/opentextbooks/subjects/early-childhood",
}
],
"publishers": [
{
"id": 874,
"name": "Open SUNY",
"url": "https://textbooks.opensuny.org",
"year": 2020,
"created_at": "2020-07-21T23:48:48.000Z",
"updated_at": "2020-07-21T23:48:48.000Z",
}
],
},
{
"identifiers": {"open_textbook_library": ["895"]},
"source_records": ["open_textbook_library:895"],
"title": "The ELC: An Early Childhood Learning Community at Work",
"languages": ["eng"],
"description": "The ELC professional development model was designed to improve the quality of teacher candidates",
"subjects": ["Early Childhood"],
"publishers": ["Open SUNY"],
"authors": [
{"name": "Heather Bridge"},
{"name": "Lorraine Melita"},
{"name": "Patricia Roiger"},
],
"lc_classifications": ["LB1139.2"],
},
),
# Test case 3: None values
(
{
'id': 730,
'title': 'Mythology Unbound: An Online Textbook for Classical Mythology',
'ISBN10': None,
'ISBN13': None,
'language': None,
'contributors': [
{
'first_name': 'EVANS',
'middle_name': None,
'last_name': None,
'contribution': None,
'primary': True,
},
{
'first_name': 'Eve',
'middle_name': None,
'last_name': 'Johnson',
'contribution': None,
'primary': False,
},
],
},
{
"identifiers": {"open_textbook_library": ["730"]},
"source_records": ["open_textbook_library:730"],
"title": "Mythology Unbound: An Online Textbook for Classical Mythology",
"authors": [{"name": "EVANS"}],
"contributors": [{'name': 'Eve Johnson', 'role': None}],
},
),
],
)
def test_map_data(input_data, expected_output):
result = map_data(input_data)
assert result == expected_output
| 8,835 | Python | .py | 212 | 21.891509 | 130 | 0.373275 | internetarchive/openlibrary | 5,078 | 1,311 | 956 | AGPL-3.0 | 9/5/2024, 5:07:13 PM (Europe/Amsterdam) |
504 | test_promise_batch_imports.py | internetarchive_openlibrary/scripts/tests/test_promise_batch_imports.py | import pytest
from ..promise_batch_imports import format_date
@pytest.mark.parametrize(
"date, only_year, expected",
[
("20001020", False, "2000-10-20"),
("20000101", True, "2000"),
("20000000", True, "2000"),
],
)
def test_format_date(date, only_year, expected) -> None:
assert format_date(date=date, only_year=only_year) == expected
| 378 | Python | .py | 12 | 26.916667 | 66 | 0.644628 | internetarchive/openlibrary | 5,078 | 1,311 | 956 | AGPL-3.0 | 9/5/2024, 5:07:13 PM (Europe/Amsterdam) |
505 | test_obfi.py | internetarchive_openlibrary/scripts/tests/test_obfi.py | import io
import os
import socket
import sys
import time
import urllib.request
from collections.abc import Callable, Generator
from pathlib import Path
import pytest
os.environ['SEED_PATH'] = 'must be truthy for obfi/decode_ip scripts to run'
from ..obfi import hide, mktable, reveal, shownames
def mock_urlopen(*args, **kwargs):
"""Mock for urllib.request.urlopen to always return seed=1234."""
class MockRead:
def read(self):
return b"seed=1234"
def __enter__(self):
return self
def __exit__(self, *args):
pass
return MockRead()
@pytest.fixture
def get_patched_hide(monkeypatch) -> hide.HashIP:
"""
Patch hide's call to urllib so we can use the same key and not rely
on network connectivity.
"""
monkeypatch.setattr(urllib.request, "urlopen", mock_urlopen)
hash_ip = hide.HashIP()
return hash_ip
@pytest.fixture
def get_patched_mktable(monkeypatch, tmp_path) -> mktable.HashIP:
"""
Patch mktable's call to url so we can use the same key and not rely
on network connectivity.
Give mktable a temp custom prefix to use when saving the real_ip db.
"""
monkeypatch.setattr(urllib.request, "urlopen", mock_urlopen)
file: Path = tmp_path / "hide_ip_map_"
hash_ip = mktable.HashIP(real_ip_prefix=file.as_posix())
return hash_ip
class TestHide:
def test_get_seed(self, get_patched_hide) -> None:
hash_ip = get_patched_hide
assert hash_ip.seed == b"1234"
with pytest.raises(AssertionError):
assert hash_ip.seed == b"raise error"
def test_hide(self, get_patched_hide) -> None:
hash_ip = get_patched_hide
assert hash_ip.hide("207.241.224.2") == "0.128.68.105"
class TestReveal:
fake_lighttpd_access_log = """0.245.206.5 localhost - [04/Apr/2023:12:34:56 +0000] "GET /example.html HTTP/1.1" 200 1234 "http://www.example.com/" "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:85.0) Gecko/20100101 Firefox/85.0"
0.81.159.57 - - [04/Apr/2023:12:34:56 +0000] "GET /example.html HTTP/1.1" 200 1234 "http://www.example.com/" "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:85.0) Gecko/20100101 Firefox/85.0"
0.168.131.52 example.com - [04/Apr/2023:12:34:56 +0000] "GET /example.html HTTP/1.1" 200 1234 "http://www.example.com/" "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:85.0) Gecko/20100101 Firefox/85.0"
0.128.68.105 archive.org - [04/Apr/2023:12:34:56 +0000] "GET /example.html HTTP/1.1" 200 1234 "http://www.example.com/" "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:85.0) Gecko/20100101 Firefox/85.0"
0.1.2.3 not_in_real_ips - [04/Apr/2023:12:34:56 +0000] "GET /example.html HTTP/1.1" 200 1234 "http://www.example.com/" "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:85.0) Gecko/20100101 Firefox/85.0"
"""
expected_output_no_replace = """0.245.206.5(127.0.0.1) localhost - [04/Apr/2023:12:34:56 +0000] "GET /example.html HTTP/1.1" 200 1234 "http://www.example.com/" "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:85.0) Gecko/20100101 Firefox/85.0"
0.81.159.57(8.8.8.8) - - [04/Apr/2023:12:34:56 +0000] "GET /example.html HTTP/1.1" 200 1234 "http://www.example.com/" "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:85.0) Gecko/20100101 Firefox/85.0"
0.168.131.52(93.184.216.34) example.com - [04/Apr/2023:12:34:56 +0000] "GET /example.html HTTP/1.1" 200 1234 "http://www.example.com/" "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:85.0) Gecko/20100101 Firefox/85.0"
0.128.68.105(207.241.224.2) archive.org - [04/Apr/2023:12:34:56 +0000] "GET /example.html HTTP/1.1" 200 1234 "http://www.example.com/" "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:85.0) Gecko/20100101 Firefox/85.0"
0.1.2.3 not_in_real_ips - [04/Apr/2023:12:34:56 +0000] "GET /example.html HTTP/1.1" 200 1234 "http://www.example.com/" "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:85.0) Gecko/20100101 Firefox/85.0"
"""
expected_output_with_replace = """127.0.0.1 localhost - [04/Apr/2023:12:34:56 +0000] "GET /example.html HTTP/1.1" 200 1234 "http://www.example.com/" "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:85.0) Gecko/20100101 Firefox/85.0"
8.8.8.8 - - [04/Apr/2023:12:34:56 +0000] "GET /example.html HTTP/1.1" 200 1234 "http://www.example.com/" "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:85.0) Gecko/20100101 Firefox/85.0"
93.184.216.34 example.com - [04/Apr/2023:12:34:56 +0000] "GET /example.html HTTP/1.1" 200 1234 "http://www.example.com/" "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:85.0) Gecko/20100101 Firefox/85.0"
207.241.224.2 archive.org - [04/Apr/2023:12:34:56 +0000] "GET /example.html HTTP/1.1" 200 1234 "http://www.example.com/" "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:85.0) Gecko/20100101 Firefox/85.0"
0.1.2.3 not_in_real_ips - [04/Apr/2023:12:34:56 +0000] "GET /example.html HTTP/1.1" 200 1234 "http://www.example.com/" "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:85.0) Gecko/20100101 Firefox/85.0"
"""
real_ips = {
"0.81.159.57": b"8.8.8.8",
"0.168.131.52": b"93.184.216.34",
"0.128.68.105": b"207.241.224.2",
"0.245.206.5": b"127.0.0.1",
}
def test_reveal_no_replace(self, monkeypatch, capsys) -> None:
monkeypatch.setattr(sys, "stdin", io.StringIO(self.fake_lighttpd_access_log))
revealer = reveal.IPRevealer(self.real_ips, replace=False)
revealer.run()
captured = capsys.readouterr()
assert captured.out == self.expected_output_no_replace
def test_reveal_with_replace(self, monkeypatch, capsys) -> None:
monkeypatch.setattr(sys, "stdin", io.StringIO(self.fake_lighttpd_access_log))
revealer = reveal.IPRevealer(self.real_ips, replace=True)
revealer.run()
captured = capsys.readouterr()
assert captured.out == self.expected_output_with_replace
class TestMkTable:
"""
Tests for mktable. All tests use a mocked urllib and temporary file for
hide_ip_map_<yday>.
"""
def test_get_seed(self, get_patched_mktable) -> None:
"""urllib.requests.urlopen has been patched to return a seed of 1234."""
hash_ip = get_patched_mktable
assert hash_ip.seed == b"1234"
def test_seed_changes_when_yday_changes(
self, monkeypatch, get_patched_mktable
) -> None:
"""Ensure the seed changes each day."""
hash_ip = get_patched_mktable
# Ensure the seed stays the same when hide() is executed and the day
# has not changed.
original_seed = hash_ip.seed
hash_ip.hide("8.8.8.8")
assert original_seed == hash_ip.seed
# Patch gmtime() so that index 7 (tm_yday) returns 70000, which should
# cause get_seed() to run again when hide() is executed. Overwrite
# the previous seed to ensure a new seed is set when
# hide() -> get_seed() are executed.
monkeypatch.setattr(time, "gmtime", lambda: [0, 1, 2, 3, 4, 5, 6, 70_000])
hash_ip.seed = 70_000
hash_ip.hide("127.0.0.1")
assert hash_ip.seed != 70_000
def test_hidden_hosts_are_written_to_hide_ip_map(
self, get_patched_mktable, monkeypatch, capsys
) -> None:
"""
Add unique and duplicate IPs. Only the unique IPs should be echoed
back to STDOUT; duplicated IPs are already in the DB.
"""
hash_ip = get_patched_mktable
# 127.0.0.1 is duplicated
real_ips = "127.0.0.1\n207.241.224.2\n127.0.0.1\n8.8.8.8\n"
expected = (
"127.0.0.1 0.245.206.5\n207.241.224.2 0.128.68.105\n8.8.8.8 0.81.159.57\n"
)
monkeypatch.setattr(sys, "stdin", io.StringIO(real_ips))
hash_ip.process_input()
captured = capsys.readouterr()
assert captured.out == expected
class TestShownames:
"""
Tests for shownames. socket.gethostbyaddr is mocked, so this only tests
that if an ostensibly valid IP address is found in STDIN, that its
resolved hostname is appended and the line is printed to STDOUT.
"""
def get_hostname(self, ip):
"""Give some static hostname responses."""
if ip == "207.241.224.2":
return ("www.archive.org", [], ["207.241.224.2"])
elif ip == "8.8.8.8":
return ("dns.google", [], ["8.8.8.8"])
else:
raise ValueError("Unknown host")
def test_shownames(self, monkeypatch, capsys) -> None:
"""
When an IP resolves, stick it in [brackets] next to the IP.
This tests both an IP that resolves and one that doesn't.
"""
revealed_lighttpd_log = """0.128.68.105(207.241.224.2) archive.org - [04/Apr/2023:12:34:56 +0000] "GET /example.html HTTP/1.1" 200 1234 "http://www.example.com/" "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:85.0) Gecko/20100101 Firefox/85.0"
0.168.131.52(93.184.216.34) example.com - [04/Apr/2023:12:34:56 +0000] "GET /example.html HTTP/1.1" 200 1234 "http://www.example.com/" "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:85.0) Gecko/20100101 Firefox/85.0"\n"""
expected = """0.128.68.105(www.archive.org[207.241.224.2]) archive.org - [04/Apr/2023:12:34:56 +0000] "GET /example.html HTTP/1.1" 200 1234 "http://www.example.com/" "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:85.0) Gecko/20100101 Firefox/85.0"
0.168.131.52(93.184.216.34) example.com - [04/Apr/2023:12:34:56 +0000] "GET /example.html HTTP/1.1" 200 1234 "http://www.example.com/" "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:85.0) Gecko/20100101 Firefox/85.0"\n"""
monkeypatch.setattr(socket, "gethostbyaddr", self.get_hostname)
monkeypatch.setattr(sys, "stdin", io.StringIO(revealed_lighttpd_log))
shownames.run()
captured = capsys.readouterr()
assert captured.out == expected
| 9,766 | Python | .py | 160 | 54.5375 | 252 | 0.660113 | internetarchive/openlibrary | 5,078 | 1,311 | 956 | AGPL-3.0 | 9/5/2024, 5:07:13 PM (Europe/Amsterdam) |
506 | test_import_standard_ebooks.py | internetarchive_openlibrary/scripts/tests/test_import_standard_ebooks.py | from scripts.import_standard_ebooks import map_data
SAMPLE_1 = {
'id': 'https://standardebooks.org/ebooks/t-e-lawrence/seven-pillars-of-wisdom',
'guidislink': True,
'link': 'https://standardebooks.org/ebooks/t-e-lawrence/seven-pillars-of-wisdom',
'dcterms_identifier': 'url:https://standardebooks.org/ebooks/t-e-lawrence/seven-pillars-of-wisdom',
'title': 'Seven Pillars of Wisdom',
'title_detail': {
'type': 'text/plain',
'language': None,
'base': '',
'value': 'Seven Pillars of Wisdom',
},
'authors': [
{
'name': 'T. E. Lawrence',
'href': 'https://standardebooks.org/ebooks/t-e-lawrence',
}
],
'author_detail': {
'name': 'T. E. Lawrence',
'href': 'https://standardebooks.org/ebooks/t-e-lawrence',
},
'href': 'https://standardebooks.org/ebooks/t-e-lawrence',
'author': 'T. E. Lawrence',
'schema_alternatename': 'Thomas Edward Lawrence',
'schema_sameas': 'http://id.loc.gov/authorities/names/n79097491',
'published': '2022-01-01T22:32:49Z',
'updated': '2024-06-03T21:26:42Z',
'dcterms_language': 'en-GB',
'dcterms_publisher': 'Standard Ebooks',
'rights': 'Public domain in the United States. Users located outside of the United States must check their local laws before using this ebook. Original content released to the public domain via the Creative Commons CC0 1.0 Universal Public Domain Dedication.', # noqa: E501
'rights_detail': {
'type': 'text/plain',
'language': None,
'base': '',
'value': 'Public domain in the United States. Users located outside of the United States must check their local laws before using this ebook. Original content released to the public domain via the Creative Commons CC0 1.0 Universal Public Domain Dedication.', # noqa: E501
},
'summary': 'T. E. Lawrence’s memoir of leading the Arab revolt against the Ottoman empire during World War I.',
'summary_detail': {
'type': 'text/plain',
'language': None,
'base': '',
'value': 'T. E. Lawrence’s memoir of leading the Arab revolt against the Ottoman empire during World War I.',
},
'content': [
{
'type': 'text/html',
'language': None,
'base': '',
'value': '<p><i>Seven Pillars of Wisdom</i> is <a href="https://standardebooks.org/ebooks/t-e-lawrence"><abbr>T. E.</abbr> Lawrence’s</a> memoir of his involvement in leading a portion of the Arab revolt against the Ottoman empire during World War I. The empire had joined the side of Germany and the Central Powers in the war, and Britain hoped that a successful revolt would take the empire out of the war effort. Britain had also promised the Arabs that, if they were successful, England would recognize a single Arab state.</p> <p>Lawrence convinced the Arab leaders, who had historically not shown a willingness to work together, to join forces in supporting Britain’s strategy in the area. His memoir is part travelogue, part philosophy treatise, and part action novel. It details his movements and actions during his two year involvement, his relationships with the various Arab leaders and men who fought with him, and his thoughts—and doubts—during that time. It’s a gripping tale made famous by the movie <i>Lawrence of Arabia</i>, and one that Winston Churchill called “unsurpassable” as a “narrative of war and adventure.”</p> <p>The manuscript of <i>Seven Pillars of Wisdom</i> has a rich history. Lawrence finished his first draft in 1919 from his notes during the war, but lost most of it when changing trains in England (it was never found). The next year, he started working on a new version from memory that ended up being sixty percent longer than the original. He then edited that version (although it was still a third longer than the original draft), finishing it in early 1922, and had eight copies of it printed to give to friends so they could review it and offer editing suggestions (and to prevent a repeat of losing his only copy). About this time he re-enlisted in the service, but friends convinced him to work on a version he could publish. In 1926, he had a first edition of approximately 200 copies published that included 125 black-and-white and color illustrations from sixteen different artists. The first edition lost money, and it was the only edition published during his lifetime. This edition uses the first edition text and includes all 125 of the original illustrations, including both endpapers.</p>', # noqa: E501
}
],
'tags': [
{
'term': 'Arab countries--History--Arab Revolt, 1916-1918',
'scheme': 'http://purl.org/dc/terms/LCSH',
'label': None,
},
{
'term': 'World War, 1914-1918',
'scheme': 'http://purl.org/dc/terms/LCSH',
'label': None,
},
{
'term': 'Adventure',
'scheme': 'https://standardebooks.org/vocab/subjects',
'label': None,
},
{
'term': 'Memoir',
'scheme': 'https://standardebooks.org/vocab/subjects',
'label': None,
},
{
'term': 'Nonfiction',
'scheme': 'https://standardebooks.org/vocab/subjects',
'label': None,
},
],
'links': [
{
'href': 'https://standardebooks.org/ebooks/t-e-lawrence/seven-pillars-of-wisdom/downloads/cover.jpg',
'rel': 'http://opds-spec.org/image',
'type': 'image/jpeg',
},
{
'href': 'https://standardebooks.org/ebooks/t-e-lawrence/seven-pillars-of-wisdom/downloads/cover-thumbnail.jpg',
'rel': 'http://opds-spec.org/image/thumbnail',
'type': 'image/jpeg',
},
{
'href': 'https://standardebooks.org/ebooks/t-e-lawrence/seven-pillars-of-wisdom',
'rel': 'alternate',
'title': 'This ebook’s page at Standard Ebooks',
'type': 'application/xhtml+xml',
},
{
'href': 'https://standardebooks.org/ebooks/t-e-lawrence/seven-pillars-of-wisdom/downloads/t-e-lawrence_seven-pillars-of-wisdom.epub',
'length': '62070075',
'rel': 'http://opds-spec.org/acquisition/open-access',
'title': 'Recommended compatible epub',
'type': 'application/epub+zip',
},
{
'href': 'https://standardebooks.org/ebooks/t-e-lawrence/seven-pillars-of-wisdom/downloads/t-e-lawrence_seven-pillars-of-wisdom_advanced.epub',
'length': '62221725',
'rel': 'http://opds-spec.org/acquisition/open-access',
'title': 'Advanced epub',
'type': 'application/epub+zip',
},
{
'href': 'https://standardebooks.org/ebooks/t-e-lawrence/seven-pillars-of-wisdom/downloads/t-e-lawrence_seven-pillars-of-wisdom.kepub.epub',
'length': '62135106',
'rel': 'http://opds-spec.org/acquisition/open-access',
'title': 'Kobo Kepub epub',
'type': 'application/kepub+zip',
},
{
'href': 'https://standardebooks.org/ebooks/t-e-lawrence/seven-pillars-of-wisdom/downloads/t-e-lawrence_seven-pillars-of-wisdom.azw3',
'length': '63108449',
'rel': 'http://opds-spec.org/acquisition/open-access',
'title': 'Amazon Kindle azw3',
'type': 'application/x-mobipocket-ebook',
},
],
}
def test_map_data():
assert map_data(SAMPLE_1) == {
"title": "Seven Pillars of Wisdom",
"source_records": ["standard_ebooks:t-e-lawrence/seven-pillars-of-wisdom"],
"publishers": ["Standard Ebooks"],
"publish_date": "2022",
"authors": [{"name": "T. E. Lawrence"}],
"description": SAMPLE_1["content"][0]["value"],
"subjects": [
"Arab countries--History--Arab Revolt, 1916-1918",
"World War, 1914-1918",
"Adventure",
"Memoir",
"Nonfiction",
],
"identifiers": {"standard_ebooks": ["t-e-lawrence/seven-pillars-of-wisdom"]},
"languages": ["eng"],
"cover": "https://standardebooks.org/ebooks/t-e-lawrence/seven-pillars-of-wisdom/downloads/cover.jpg",
}
| 8,414 | Python | .py | 146 | 48.027397 | 2,271 | 0.633131 | internetarchive/openlibrary | 5,078 | 1,311 | 956 | AGPL-3.0 | 9/5/2024, 5:07:13 PM (Europe/Amsterdam) |
507 | test_solr_updater.py | internetarchive_openlibrary/scripts/tests/test_solr_updater.py | import sys
from unittest.mock import MagicMock
# TODO: Can we remove _init_path someday :(
sys.modules['_init_path'] = MagicMock()
from scripts.solr_updater import parse_log
class TestParseLog:
def test_action_save(self):
sample_record = {
'action': 'save',
'site': 'openlibrary.org',
'timestamp': '2022-04-07T23:03:33.942746',
'data': {
'comment': 'Fixed author redirect',
'key': '/books/OL34239002M',
'query': {
'type': '/type/edition',
'authors': ['/authors/OL9388A'],
'languages': ['/languages/eng'],
'title': 'Romeo and Juliet Annotated',
'works': ['/works/OL362427W'],
'key': '/books/OL34239002M',
},
'result': {'key': '/books/OL34239002M', 'revision': 3},
'changeset': {
'kind': 'update',
'author': {'key': '/people/bluebar'},
'ip': '123.123.123.123',
'comment': 'Fixed author redirect',
'timestamp': '2022-04-07T23:03:33.942746',
'bot': True,
'changes': [{'key': '/books/OL34239002M', 'revision': 3}],
'data': {},
'id': '123456',
'docs': [
{
'type': {'key': '/type/edition'},
'authors': [{'key': '/authors/OL9388A'}],
'languages': [{'key': '/languages/eng'}],
'title': 'Romeo and Juliet Annotated',
'works': [{'key': '/works/OL362427W'}],
'key': '/books/OL34239002M',
'latest_revision': 3,
'revision': 3,
'created': {
'type': '/type/datetime',
'value': '2021-10-01T12:56:46.152576',
},
'last_modified': {
'type': '/type/datetime',
'value': '2022-04-07T23:03:33.942746',
},
}
],
'old_docs': [
{
'type': {'key': '/type/edition'},
'authors': [{'key': '/authors/OL9352911A'}],
'languages': [{'key': '/languages/eng'}],
'title': 'Romeo and Juliet Annotated',
'works': [{'key': '/works/OL362427W'}],
'key': '/books/OL34239002M',
'latest_revision': 2,
'revision': 2,
'created': {
'type': '/type/datetime',
'value': '2021-10-01T12:56:46.152576',
},
'last_modified': {
'type': '/type/datetime',
'value': '2022-02-17T20:17:11.381695',
},
}
],
},
'ip': '123.123.123.123',
'author': '/people/bluebar',
},
}
assert list(parse_log([sample_record], False)) == [
'/books/OL34239002M',
'/authors/OL9352911A',
]
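# Note: the expected keys include '/authors/OL9352911A' from old_docs; the
# save replaced that author with '/authors/OL9388A', so the no-longer-linked
# author also needs reindexing.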
def test_move_edition(self):
sample_record = {
'action': 'save_many',
'site': 'openlibrary.org',
'timestamp': '2022-04-07T23:03:33.942746',
'data': {
'comment': 'Move edition',
'key': '/books/OL34239002M',
'query': {
'type': '/type/edition',
'authors': ['/authors/OL9388A'],
'languages': ['/languages/eng'],
'title': 'Romeo and Juliet Annotated',
'works': ['/works/OL362427W'],
'key': '/books/OL34239002M',
},
'result': {'key': '/books/OL34239002M', 'revision': 3},
'changeset': {
'kind': 'update',
'author': {'key': '/people/bluebar'},
'ip': '123.123.123.123',
'comment': 'Move edition',
'timestamp': '2022-04-07T23:03:33.942746',
'bot': False,
'changes': [{'key': '/books/OL34239002M', 'revision': 3}],
'data': {},
'id': '123456',
'docs': [
{
'type': {'key': '/type/edition'},
'authors': [{'key': '/authors/OL9388A'}],
'languages': [{'key': '/languages/eng'}],
'title': 'Romeo and Juliet Annotated',
'works': [{'key': '/works/OL42W'}],
'key': '/books/OL34239002M',
'latest_revision': 3,
'revision': 3,
'created': {
'type': '/type/datetime',
'value': '2021-10-01T12:56:46.152576',
},
'last_modified': {
'type': '/type/datetime',
'value': '2022-04-07T23:03:33.942746',
},
}
],
'old_docs': [
{
'type': {'key': '/type/edition'},
'authors': [{'key': '/authors/OL9388A'}],
'languages': [{'key': '/languages/eng'}],
'title': 'Romeo and Juliet Annotated',
'works': [{'key': '/works/OL362427W'}],
'key': '/books/OL34239002M',
'latest_revision': 2,
'revision': 2,
'created': {
'type': '/type/datetime',
'value': '2021-10-01T12:56:46.152576',
},
'last_modified': {
'type': '/type/datetime',
'value': '2022-02-17T20:17:11.381695',
},
}
],
},
'ip': '123.123.123.123',
'author': '/people/bluebar',
},
}
assert list(parse_log([sample_record], False)) == [
'/books/OL34239002M',
'/works/OL362427W',
]
def test_new_account(self):
sample_record = {
'action': 'save_many',
'site': 'openlibrary.org',
'timestamp': '2022-03-29T00:00:07.835173',
'data': {
'comment': 'Created new account.',
'query': [
{
'key': '/people/foobar',
'type': {'key': '/type/user'},
'displayname': 'foobar',
'permission': {'key': '/people/foobar/permission'},
},
{
'key': '/people/foobar/usergroup',
'type': {'key': '/type/usergroup'},
'members': [{'key': '/people/foobar'}],
},
{
'key': '/people/foobar/permission',
'type': {'key': '/type/permission'},
'readers': [{'key': '/usergroup/everyone'}],
'writers': [{'key': '/people/foobar/usergroup'}],
'admins': [{'key': '/people/foobar/usergroup'}],
},
],
'result': [
{'key': '/people/foobar', 'revision': 1},
{'key': '/people/foobar/usergroup', 'revision': 1},
{'key': '/people/foobar/permission', 'revision': 1},
],
'changeset': {
'kind': 'new-account',
'author': {'key': '/people/foobar'},
'ip': '123.123.123.123',
'comment': 'Created new account.',
'timestamp': '2022-03-29T00:00:07.835173',
'bot': False,
'changes': [
{'key': '/people/foobar', 'revision': 1},
{'key': '/people/foobar/usergroup', 'revision': 1},
{'key': '/people/foobar/permission', 'revision': 1},
],
'data': {},
'id': '123456',
'docs': [
{
'key': '/people/foobar',
'type': {'key': '/type/user'},
'displayname': 'foobar',
'permission': {'key': '/people/foobar/permission'},
'latest_revision': 1,
'revision': 1,
'created': {
'type': '/type/datetime',
'value': '2022-03-29T00:00:07.835173',
},
'last_modified': {
'type': '/type/datetime',
'value': '2022-03-29T00:00:07.835173',
},
},
{
'key': '/people/foobar/usergroup',
'type': {'key': '/type/usergroup'},
'members': [{'key': '/people/foobar'}],
'latest_revision': 1,
'revision': 1,
'created': {
'type': '/type/datetime',
'value': '2022-03-29T00:00:07.835173',
},
'last_modified': {
'type': '/type/datetime',
'value': '2022-03-29T00:00:07.835173',
},
},
{
'key': '/people/foobar/permission',
'type': {'key': '/type/permission'},
'readers': [{'key': '/usergroup/everyone'}],
'writers': [{'key': '/people/foobar/usergroup'}],
'admins': [{'key': '/people/foobar/usergroup'}],
'latest_revision': 1,
'revision': 1,
'created': {
'type': '/type/datetime',
'value': '2022-03-29T00:00:07.835173',
},
'last_modified': {
'type': '/type/datetime',
'value': '2022-03-29T00:00:07.835173',
},
},
],
'old_docs': [None, None, None],
},
'ip': '123.123.123.123',
'author': '/people/foobar',
},
}
assert list(parse_log([sample_record], False)) == [
'/people/foobar',
'/people/foobar/usergroup',
'/people/foobar/permission',
]
| 12,125 | Python | .py | 265 | 22.909434 | 79 | 0.328158 | internetarchive/openlibrary | 5,078 | 1,311 | 956 | AGPL-3.0 | 9/5/2024, 5:07:13 PM (Europe/Amsterdam) |
508 | test_partner_batch_imports.py | internetarchive_openlibrary/scripts/tests/test_partner_batch_imports.py | from datetime import datetime
import pytest
from ..partner_batch_imports import (
Biblio,
is_low_quality_book,
is_published_in_future_year,
)
csv_row = "USA01961304|0962561851||9780962561856|AC|I|TC||B||Sutra on Upasaka Precepts|The||||||||2006|20060531|Heng-ching, Shih|TR||||||||||||||226|ENG||0.545|22.860|15.240|||||||P|||||||74474||||||27181|USD|30.00||||||||||||||||||||||||||||SUTRAS|BUDDHISM_SACRED BOOKS|||||||||REL007030|REL032000|||||||||HRES|HRG|||||||||RB,BIP,MIR,SYN|1961304|00|9780962561856|67499962||PRN|75422798|||||||BDK America||1||||||||10.1604/9780962561856|91-060120||20060531|||||REL007030||||||" # noqa: E501
csv_row_with_full_date = "USA01961304|0962561851||9780962561856|AC|I|TC||B||Sutra on Upasaka Precepts|The||||||||20060531|20060531|Heng-ching, Shih|TR||||||||||||||226|ENG||0.545|22.860|15.240|||||||P|||||||74474||||||27181|USD|30.00||||||||||||||||||||||||||||SUTRAS|BUDDHISM_SACRED BOOKS|||||||||REL007030|REL032000|||||||||HRES|HRG|||||||||RB,BIP,MIR,SYN|1961304|00|9780962561856|67499962||PRN|75422798|||||||BDK America||1||||||||10.1604/9780962561856|91-060120||20060531|||||REL007030||||||" # noqa: E501
non_books = [
"USA39372027|8866134201||9788866134206|AC|I|DI||||Moleskine Cahier Journal (Set of 3), Pocket, Ruled, Pebble Grey, Soft Cover (3. 5 X 5. 5)||||||||||20120808|Moleskine|AU|X|||||||||||||64|ENG||0.126|13.970|8.890|1.270|X|||||T|||Cahier Journals||||1161400||||||333510|USD|9.95||||||||||||||||||||||||||||||||||||||||||||||||||||||||||BIP,OTH|39372027|01|9788866134206|69270822|||29807389||||||2328606|Moleskine||1|||||||||||||||NO|NON000000|||WZS|||", # noqa: E501
"AUS52496256|1452145865||9781452145860|AC|I|ZZ||||People Pencils : 10 Graphite Pencils||||||||||20160501|Sukie|AU|X|||||||||||||10|ENG||0.170|19.685|8.890|2.235|X|||||T|||||||5882||||||1717043|AUD|24.99||||||||||||||||||||||||||||NON-CLASSIFIABLE||||||||||NON000000||||||||||||||||||||BIP,OTH|52496256|02|9781452145860|51743426|||48922851|||||||Chronicle Books LLC||80|||||||||||||||NO|ART048000|||AFH|WZS||", # noqa: E501
"AUS49413469|1423638298||9781423638292|AC|I|ZZ||O||Keep Calm and Hang on Mug|||1 vol.|||||||20141201|Gibbs Smith Publisher Staff|DE|X||||||||||||||ENG||0.350|7.620|9.322|9.449||||||T|||||||20748||||||326333|AUD|22.99||||||||||||||||||||||||||||||||||||||||||||||||||||||||||BIP,OTH|49413469||9781423638292|50573089||OTH|1192128|||||||Gibbs Smith, Publisher||7||||||||||||||||NON000000|||WZ|||", # noqa: E501
"USA52681473|0735346623||9780735346628|AC|I|TY||||Klimt Expectation 500 Piece Puzzle||||||||||20160119|Galison|AU|X|Klimt, Gustav|AT|||||||||||500|ENG||0.500|20.000|20.100|5.500|X|||||T|||||||10300||||||333510|USD|13.99||||||||||||||||||||||||||||||||||||||||||||||||||||||||||BIP,OTH|52681473|28|9780735346628|70053633|||32969171|773245||||||Galison||20|||||||||||20160119||||YES|NON000000|||WZS|||", # noqa: E501
"AUS49852633|1423639103||9781423639107|US|I|TS||||I Like Big Books T-Shirt X-Large|||1 vol.|||||||20141201|Gibbs Smith, Publisher|DE|X||||||||||||||ENG||0.280|27.940|22.860|2.540||||||T|||||||20748||||||326333|AUD|39.99||||||||||||||||||||||||||||||||||||||||||||||||||||||||||BIP,OTH|49852633|35|9781423639107|49099247|||19801468|||||||Gibbs Smith, Publisher||1||||||||||||||||NON000000|||WZ|||", # noqa: E501
]
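# In each row, the product-type code at index 6 (e.g. 'DI', 'ZZ', 'TY', 'TS')
# and the classification at index 121 are what trigger Biblio's
# '{code}/{pclass} is NONBOOK' assertion, exercised in
# test_non_books_rejected below.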
class TestBiblio:
def test_sample_csv_row(self):
b = Biblio(csv_row.strip().split('|'))
data = {
'title': 'Sutra on Upasaka Precepts',
'isbn_13': ['9780962561856'],
'publish_date': '2006',
'publishers': ['BDK America'],
'weight': '0.545',
'authors': [{'name': 'Heng-ching, Shih'}],
'number_of_pages': 226,
'languages': ['eng'],
'subjects': ['Sutras', 'Buddhism, sacred books'],
'source_records': ['bwb:9780962561856'],
'identifiers': {
'doi': ['10.1604/9780962561856'],
},
'lccn': ['91-060120'],
}
assert b.json() == data
def test_sample_csv_row_with_full_date(self):
"""Only import the year. The date in the input here is in YYYYMMDD format, but should come out YYYY."""
b = Biblio(csv_row_with_full_date.strip().split('|'))
data = {
'title': 'Sutra on Upasaka Precepts',
'isbn_13': ['9780962561856'],
'publish_date': '2006',
'publishers': ['BDK America'],
'weight': '0.545',
'authors': [{'name': 'Heng-ching, Shih'}],
'number_of_pages': 226,
'languages': ['eng'],
'subjects': ['Sutras', 'Buddhism, sacred books'],
'source_records': ['bwb:9780962561856'],
'identifiers': {
'doi': ['10.1604/9780962561856'],
},
'lccn': ['91-060120'],
}
assert b.json() == data
@pytest.mark.parametrize('input_', non_books)
def test_non_books_rejected(self, input_):
data = input_.strip().split('|')
code = data[6]
pclass = data[121]
with pytest.raises(AssertionError, match=f'{code}/{pclass} is NONBOOK'):
_ = Biblio(data)
def test_is_low_quality_book():
book = {"title": "A NoTeBoOk Z", "authors": [{"name": "Al"}, {"name": "Zach"}]}
assert is_low_quality_book(book) is False, book
book["authors"] = [{"name": "Al"}, {"name": "hOlO"}, {"name": "Zach"}]
assert is_low_quality_book(book) is True, book
book["title"] = "A NoTe-BoOk Z"
assert is_low_quality_book(book) is True, book
book = {"title": "NOTEBOOK", "authors": [{"name": "pickleball publishing"}]}
assert is_low_quality_book(book) is True, book
book["authors"] = [
{"name": "hol"},
{"name": "mad"},
{"name": "mazz"},
{"name": "mikemi"},
{"name": "tobias publishers"},
]
assert is_low_quality_book(book) is False, book
book["authors"] = [
{"name": "razal"},
{"name": "tobias publishing"},
{"name": "koraya"},
{"name": "pickleball"},
{"name": "d"},
]
assert is_low_quality_book(book) is True, book
book = {
"title": "A aNNotaTEd Z",
"publishers": ["Independently Published"],
"publish_date": "2017-09-01T05:14:17",
}
assert is_low_quality_book(book) is False, book
book["publish_date"] = "2018"
assert is_low_quality_book(book) is True, book
book["publishers"] = ["Independently Publish"]
assert is_low_quality_book(book) is False, book
book["publishers"] += ["Independently Published"]
assert is_low_quality_book(book) is True, book
book["title"] = "A aNNotaTE Z"
assert is_low_quality_book(book) is False, book
assert is_low_quality_book(
{
'title': 'A tale of two cities (annotated)',
'publish_date': '2020',
'publishers': ['Independently Published'],
}
)
def test_is_published_in_future_year() -> None:
last_year = str(datetime.now().year - 1)
last_year_book = {'publish_date': last_year}
assert is_published_in_future_year(last_year_book) is False
this_year = str(datetime.now().year)
this_year_book = {'publish_date': this_year}
assert is_published_in_future_year(this_year_book) is False
next_year = str(datetime.now().year + 1)
next_year_book = {'publish_date': next_year}
assert is_published_in_future_year(next_year_book) is True
# No publication year
no_year_book = {'publish_date': '0'}
assert is_published_in_future_year(no_year_book) is False
| 7,608 | Python | .py | 122 | 54.483607 | 510 | 0.567832 | internetarchive/openlibrary | 5,078 | 1,311 | 956 | AGPL-3.0 | 9/5/2024, 5:07:13 PM (Europe/Amsterdam) |
509 | test_copydocs.py | internetarchive_openlibrary/scripts/tests/test_copydocs.py | from __future__ import annotations
from ..copydocs import copy, KeyVersionPair
class TestKeyVersionPair:
def test_from_uri(self):
pair = KeyVersionPair.from_uri('/works/OL1W?v=7')
assert pair.key == '/works/OL1W'
assert pair.version == '7'
pair = KeyVersionPair.from_uri('/authors/OL1A')
assert pair.key == '/authors/OL1A'
assert pair.version is None
def test_to_uri(self):
make = KeyVersionPair._make
assert make(['/authors/OL1A', None]).to_uri() == '/authors/OL1A'
assert make(['/works/OL1W', '7']).to_uri() == '/works/OL1W?v=7'
class FakeServer:
"""Mimics OpenLibrary's API class"""
def __init__(self, docs: list[dict]):
"""
:param list[dict] docs:
"""
        self.db: dict = {}  # Mapping of key to (map of revision to doc)
self.save_many(docs)
def get(self, key: str, revision: int | None = None) -> dict | None:
"""
:param str key:
:param int or None revision:
:return: dict or None
"""
revisions = self.db.get(key, {})
if revision is None and len(revisions) > 0:
return max(list(revisions.values()), key=lambda d: d['revision'])
else:
return revisions.get(revision, None)
def get_many(self, keys: list[str], max_length: int = 500) -> dict:
"""
:param list of str keys:
:return: Map of key to document
"""
result = {}
for k in keys:
if k in self.db:
result[k] = self.get(k)
return result
def save_many(self, docs: list[dict], comment: str | None = None) -> None:
"""
:param list[dict] docs:
:param str or None comment:
"""
for doc in docs:
key = doc['key']
revision = doc['revision']
if key not in self.db:
self.db[key] = {}
self.db[doc['key']][revision] = doc
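# A quick sketch of FakeServer's revision semantics (the docs below echo the
# fixtures used in TestCopy; this is illustrative, not an additional test):
#
#     server = FakeServer([
#         {'key': '/works/OL1W', 'revision': 1, 'type': {'key': '/type/work'}},
#         {'key': '/works/OL1W', 'revision': 2, 'type': {'key': '/type/work'}},
#     ])
#     server.get('/works/OL1W')['revision']     # -> 2 (latest revision wins)
#     server.get('/works/OL1W', 1)['revision']  # -> 1 (explicit revision)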
class TestCopy:
def setup_method(self, method):
self.docs = [
{'key': '/works/OL1W', 'revision': 1, 'type': {'key': '/type/work'}},
{'key': '/works/OL1W', 'revision': 2, 'type': {'key': '/type/work'}},
{'key': '/works/OL1W', 'revision': 3, 'type': {'key': '/type/work'}},
{'key': '/books/OL2M', 'revision': 1, 'type': {'key': '/type/edition'}},
]
self.src = FakeServer(self.docs)
self.dest = FakeServer([])
def test_basic_copy(self):
copy(self.src, self.dest, ['/books/OL2M'], 'asdf')
assert self.dest.get('/books/OL2M') == self.src.get('/books/OL2M')
assert len(self.dest.db) == 1
def test_default_get_gets_latest_version(self):
copy(self.src, self.dest, ['/works/OL1W'], 'asdf')
assert self.dest.get('/works/OL1W') == self.src.get('/works/OL1W', 3)
assert len(self.dest.db) == 1
# Note revision would be 1 in the dest in actuality
def test_getting_specific_version(self):
copy(self.src, self.dest, ['/works/OL1W?v=1'], 'asdf')
assert self.dest.get('/works/OL1W') == self.src.get('/works/OL1W', 1)
assert len(self.dest.db) == 1
| 3,196 | Python | .py | 77 | 32.61039 | 84 | 0.553658 | internetarchive/openlibrary | 5,078 | 1,311 | 956 | AGPL-3.0 | 9/5/2024, 5:07:13 PM (Europe/Amsterdam) |
510 | issue_comment_bot.py | internetarchive_openlibrary/scripts/gh_scripts/issue_comment_bot.py | #!/usr/bin/env python
"""
Fetches Open Library GitHub issues that have been commented on
within some amount of time, in hours.
If called with a Slack token and channel, publishes a digest of
the issues that were identified to the given channel.
Adds the "Needs: Response" label to the issues in GitHub.
"""
import argparse
import errno
import json
import os
import sys
import time
from datetime import datetime, timedelta
from typing import Any
import requests
github_headers = {
'X-GitHub-Api-Version': '2022-11-28',
'Accept': 'application/vnd.github+json',
}
# Custom Exceptions:
class AuthenticationError(Exception):
# Raised when a required authentication token is missing from the environment.
pass
class ConfigurationError(Exception):
# Raised when reading the configuration goes wrong in some way.
pass
def fetch_issues():
"""
Fetches and returns all open issues and pull requests from the `internetarchive/openlibrary` repository.
    GitHub API results are paginated. This function appends each page of results to a list until all pages have
    been fetched. To keep API calls to a minimum, we request the maximum number of results per request (100 per
    page, as of writing).
    Calls to fetch issues from GitHub are considered critical, and a failure of any such call will cause the script to
    fail fast.
"""
# Make initial query for open issues:
p = {'state': 'open', 'per_page': 100}
response = requests.get(
'https://api.github.com/repos/internetarchive/openlibrary/issues',
params=p,
headers=github_headers,
)
d = response.json()
if response.status_code != 200:
print('Initial request for issues has failed.')
print(f'Message: {d.get("message", "")}')
print(f'Documentation URL: {d.get("documentation_url", "")}')
response.raise_for_status()
results = d
# Fetch additional updated issues, if any exist
def get_next_page(url: str):
"""Returns list of issues and optional url for next page"""
# Get issues
resp = requests.get(url, headers=github_headers)
d = resp.json()
if resp.status_code != 200:
print('Request for next page of issues has failed.')
print(f'Message: {d.get("message", "")}')
print(f'Documentation URL: {d.get("documentation_url", "")}')
            resp.raise_for_status()
issues = d
# Prepare url for next page
        next_page = resp.links.get('next', {})
        next_url = next_page.get('url', '')
return issues, next_url
links = response.links
    next_page = links.get('next', {})
    next_url = next_page.get('url', '')
while next_url:
# Wait one second...
time.sleep(1)
# ...then, make call for more issues with next link
issues, next_url = get_next_page(next_url)
results = results + issues
return results
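# The pagination above leans on `requests` parsing the `Link` response header
# into `resp.links`. A minimal sketch of the same pattern (the repo URL is a
# stand-in, not one this script queries):
#
#     resp = requests.get('https://api.github.com/repos/octocat/hello-world/issues')
#     while next_url := resp.links.get('next', {}).get('url', ''):
#         resp = requests.get(next_url)
#         ...  # accumulate resp.json()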
def filter_issues(issues: list, hours: int, leads: list[dict[str, str]]):
"""
Returns list of issues that have the following criteria:
- Are issues, not pull requests
- Issues have at least one comment
- Issues have been last updated since the given number of hours
- Latest comment is not from an issue lead
Checking who left the last comment requires making up to two calls to
GitHub's REST API.
"""
def log_api_failure(_resp):
print(f'Failed to fetch comments for issue #{i["number"]}')
print(f'URL: {i["html_url"]}')
_d = _resp.json()
print(f'Message: {_d.get("message", "")}')
print(f'Documentation URL: {_d.get("documentation_url", "")}')
results = []
since, date_string = time_since(hours)
# Filter out as many issues as possible before making API calls for comments:
prefiltered_issues = []
for i in issues:
updated = datetime.fromisoformat(i['updated_at'])
updated = updated.replace(tzinfo=None)
if updated < since:
            # Issue is stale
continue
if i.get('pull_request', {}):
# Issue is actually a pull request
continue
if i['comments'] == 0:
# Issue has no comments
continue
prefiltered_issues.append(i)
print(f'{len(prefiltered_issues)} issues remain after initial filtering.')
print('Filtering out issues with stale comments...')
for i in prefiltered_issues:
# Wait one second
time.sleep(1)
# Fetch comments using URL from previous GitHub search results
comments_url = i.get('comments_url')
resp = requests.get(comments_url, headers=github_headers)
if resp.status_code != 200:
log_api_failure(resp)
# XXX : Somehow, notify Slack of error
continue
# Ensure that we have the last page of comments
links = resp.links
last = links.get('last', {})
last_url = last.get('url', '')
if last_url:
resp = requests.get(last_url, headers=github_headers)
if resp.status_code != 200:
log_api_failure(resp)
# XXX : Somehow, notify Slack of error
continue
# Get last comment
comments = resp.json()
if not comments:
continue
last_comment = comments[-1]
# Determine if last comment meets our criteria for Slack notifications
# First step: Ensure that the last comment was left after the given `since` datetime
created = datetime.fromisoformat(last_comment['created_at'])
# Removing timezone info to avoid TypeErrors, which occur when
# comparing a timezone-aware datetime with a timezone-naive datetime
created = created.replace(tzinfo=None)
if created > since:
# Next step: Determine if the last commenter is a lead
last_commenter = last_comment['user']['login']
if last_commenter not in [lead['githubUsername'] for lead in leads]:
lead_label = find_lead_label(i.get('labels', []))
results.append(
{
'number': i['number'],
'comment_url': last_comment['html_url'],
'commenter': last_commenter,
'issue_title': i['title'],
'lead_label': lead_label,
}
)
return results
def find_lead_label(labels: list[dict[str, Any]]) -> str:
"""
Finds and returns the name of the first lead label found in the given list of GitHub labels.
Returns an empty string if no lead label is found
"""
result = ''
for label in labels:
if label['name'].startswith('Lead:'):
result = label['name']
break
return result
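# For example (label names here are made up):
#
#     find_lead_label([{'name': 'Priority: 1'}, {'name': 'Lead: @mek'}])
#     # -> 'Lead: @mek'
#     find_lead_label([{'name': 'Priority: 1'}])
#     # -> ''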
def publish_digest(
issues: list[dict[str, str]],
slack_channel: str,
hours_passed: int,
leads: list[dict[str, str]],
all_issues_labeled: bool,
):
"""
    Creates a threaded Slack message containing a digest of recently commented GitHub issues.
    The parent Slack message says how many comments were left and over what timeframe. Each reply
    includes a link to the comment, as well as additional information.
"""
def post_message(payload: dict[str, str]):
return requests.post(
'https://slack.com/api/chat.postMessage',
headers={
'Authorization': f"Bearer {os.environ.get('SLACK_TOKEN', '')}",
'Content-Type': 'application/json; charset=utf-8',
},
json=payload,
)
# Create the parent message
parent_thread_msg = (
f'{len(issues)} new GitHub comment(s) since {hours_passed} hour(s) ago'
)
response = post_message(
{
'channel': slack_channel,
'text': parent_thread_msg,
}
)
if response.status_code != 200:
print(f'Failed to send message to Slack. Status code: {response.status_code}')
sys.exit(errno.ECOMM)
d = response.json()
if not d.get('ok', True):
print(f'Slack request not ok. Error message: {d.get("error", "")}')
# Store timestamp, which, along with the channel, uniquely identifies the parent thread
ts = d.get('ts')
for i in issues:
# Slack rate limit is roughly 1 request per second
time.sleep(1)
comment_url = i['comment_url']
issue_title = i['issue_title']
commenter = i['commenter']
message = f'<{comment_url}|Latest comment for: *{issue_title}*>\n'
username = next(
(
lead['githubUsername']
for lead in leads
if lead['leadLabel'] == i['lead_label']
),
'',
)
slack_id = username and next(
(
lead['slackId'] # type: ignore[syntax]
for lead in leads
if lead['leadLabel'] == f'Lead: @{username}'
),
'',
)
if slack_id:
message += f'Lead: {slack_id}\n'
elif i['lead_label']:
message += f'{i["lead_label"]}\n'
else:
message += 'Unknown lead\n'
message += f'Commenter: *{commenter}*'
r = post_message(
{
'channel': slack_channel,
'text': message,
'thread_ts': ts,
}
)
if r.status_code != 200:
print(f'Failed to send message to Slack. Status code: {r.status_code}')
else:
d = r.json()
if not d.get('ok', True):
print(f'Slack request not ok. Error message: {d.get("error", "")}')
if not all_issues_labeled:
r = post_message(
{
'channel': slack_channel,
'text': (
'Warning: some issues were not labeled "Needs: Response". '
'See the <https://github.com/internetarchive/openlibrary/actions/workflows/new_comment_digest.yml|log files> for more information.'
),
}
)
def time_since(hours):
"""Returns datetime and string representations of the current time, minus the given hour"""
now = datetime.now()
# XXX : Add a minute or two to the delta (to avoid dropping issues)?
since = now - timedelta(hours=hours)
return since, since.strftime('%Y-%m-%dT%H:%M:%S')
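# For example, time_since(2) called at 2024-01-02 12:00:00 (local clock) would
# return roughly (datetime(2024, 1, 2, 10, 0), '2024-01-02T10:00:00'). Note
# that datetime.now() is timezone-naive, which is why the filtering above
# strips tzinfo from GitHub's timestamps before comparing.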
def add_label_to_issues(issues) -> bool:
all_issues_labeled = True
for issue in issues:
# GitHub recommends waiting at least one second between mutative requests
time.sleep(1)
issue_labels_url = f"https://api.github.com/repos/internetarchive/openlibrary/issues/{issue['number']}/labels"
response = requests.post(
issue_labels_url,
json={"labels": ["Needs: Response"]},
headers=github_headers,
)
if response.status_code != 200:
all_issues_labeled = False
print(
f'Failed to label issue #{issue["number"]} --- status code: {response.status_code}'
)
print(issue_labels_url)
return all_issues_labeled
def verbose_output(issues):
"""
Prints detailed information about the given issues.
"""
for issue in issues:
print(f'Issue #{issue["number"]}:')
print(f'\tTitle: {issue["issue_title"]}')
print(f'\t{issue["lead_label"]}')
print(f'\tCommenter: {issue["commenter"]}')
print(f'\tComment URL: {issue["comment_url"]}')
def read_config(config_path):
with open(config_path, encoding='utf-8') as f:
return json.load(f)
def token_verification(slack_channel: str = ''):
"""
Checks that the tokens required for this job to run are available in the environment.
A GitHub token is always required. A Slack token is required only if a `slack_channel` is specified.
:param slack_channel: Channel to publish the digest. Publish step is skipped if this is an empty string.
:raises AuthenticationError: When required token is missing from the environment.
"""
if not os.environ.get('GITHUB_TOKEN', ''):
raise AuthenticationError('Required GitHub token not found in environment.')
if slack_channel and not os.environ.get('SLACK_TOKEN', ''):
raise AuthenticationError(
'Slack token must be included in environment if Slack channel is provided.'
)
def start_job():
"""
Starts the new comment digest job.
"""
# Process command-line arguments and starts the notification job
parser = _get_parser()
args = parser.parse_args()
print('Checking for required tokens...')
token_verification(args.slack_channel)
github_headers['Authorization'] = f"Bearer {os.environ.get('GITHUB_TOKEN', '')}"
try:
print('Reading configuration file...')
config = read_config(args.config)
leads = config.get('leads', [])
except (OSError, json.JSONDecodeError):
raise ConfigurationError(
'An error occurred while parsing the configuration file.'
)
print('Fetching issues from GitHub...')
issues = fetch_issues()
print(f'{len(issues)} found')
print('Filtering issues...')
filtered_issues = filter_issues(issues, args.hours, leads)
print(f'{len(filtered_issues)} remain after filtering.')
all_issues_labeled = True
if not args.no_labels:
print('Labeling issues as "Needs: Response"...')
all_issues_labeled = add_label_to_issues(filtered_issues)
if not all_issues_labeled:
print('Failed to label some issues')
if args.slack_channel:
print('Publishing digest to Slack...')
publish_digest(
filtered_issues, args.slack_channel, args.hours, leads, all_issues_labeled
)
if args.verbose:
verbose_output(filtered_issues)
def _get_parser() -> argparse.ArgumentParser:
"""
Creates and returns an ArgumentParser containing default values which were
read from the config file.
"""
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument(
'hours',
help='Fetch issues that have been updated since this many hours ago',
type=int,
)
parser.add_argument(
'-c',
'--config',
help="Path to configuration file",
type=str,
)
parser.add_argument(
'-s',
'--slack-channel',
help="Issues will be published to this Slack channel. Publishing to Slack will be skipped if this argument is missing, or is an empty string",
type=str,
)
parser.add_argument(
'--no-labels',
help='Prevent the script from labeling the issues',
action='store_true',
)
parser.add_argument(
'-v',
'--verbose',
help='Print detailed information about the issues that were found',
action='store_true',
)
return parser
if __name__ == '__main__':
try:
print('Starting job...')
start_job()
print('Job completed successfully.')
except AuthenticationError as e:
# If a required token is missing from the environment, fail fast
print(e)
sys.exit(10)
except ConfigurationError as e:
# If the configuration file cannot be read or unmarshalled, fail fast
print(e)
sys.exit(20)
except requests.exceptions.HTTPError as e:
# Fail fast if we fail to fetch issues from GitHub
print(e)
sys.exit(30)
| 15,659 | Python | .py | 399 | 30.814536 | 151 | 0.61318 | internetarchive/openlibrary | 5,078 | 1,311 | 956 | AGPL-3.0 | 9/5/2024, 5:07:13 PM (Europe/Amsterdam) |
511 | shownames.py | internetarchive_openlibrary/scripts/obfi/shownames.py | #!/usr/bin/env python3
# this matches obscured IPs, resolves them if possible, and reveals the
# resolved host in []s
# use: cat /var/log/lighttpd/current-access.log | reveal | shownames
import re
import socket
import sys
from re import Match
def add_name(match: Match) -> str:
ip = match.group(2)
if ip[0:2] == "0.":
name: str | None = None
else:
try:
name = socket.gethostbyaddr(ip)[0]
        except:  # noqa: E722
name = None
if name:
return match.group(1) + name + "[" + ip + "]" + match.group(3)
else:
return match.group(1) + ip + match.group(3)
def run() -> None:
line = sys.stdin.readline()
ip_pattern = re.compile(r"([^\d]?)(\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})([^\d]?)")
while line:
named = ip_pattern.sub(add_name, line.rstrip())
print(named)
line = sys.stdin.readline()
if __name__ == "__main__":
run()
| 940 | Python | .py | 30 | 25.833333 | 84 | 0.579533 | internetarchive/openlibrary | 5,078 | 1,311 | 956 | AGPL-3.0 | 9/5/2024, 5:07:13 PM (Europe/Amsterdam) |
512 | mktable.py | internetarchive_openlibrary/scripts/obfi/mktable.py | #!/usr/bin/env python3
# matches ip#'s from input, builds reverse table to unhide hidden ips
# use:
# sudo tcpdump -n (dst port 80 or dst port 443) | ./mktable
# leave running .. reveal uses the table
# or netstat -n | ./mktable
#
# or
# sudo tcpdump -n dst port 80 and 'tcp[tcpflags] & tcp-syn != 0' | ./mktable
#
# Exit with control+c.
import dbm.ndbm
import hashlib
import os
import re
import struct
import sys
import time
from typing import Final
import urllib.request
SEED_PATH: Final = os.getenv("SEED_PATH", "")
if not SEED_PATH:
print("Set $SEED_PATH to the URL of seed.txt")
sys.exit(1)
class HashIP:
"""
A class to hash IP addresses and store the real <-> obfuscated IP
in a map file. Every day the map file changes.
"""
def __init__(self, real_ip_prefix: str = "/var/tmp/fast/hide_ip_map_") -> None:
self.real_ip_prefix = real_ip_prefix
self.seed = b""
self.yday = time.gmtime()[7]
self.set_db()
self.get_seed()
def set_db(self) -> None:
"""Set the database."""
# Catching file-locking errors makes testing easier.
try:
self.real_ips = dbm.ndbm.open(self.real_ip_prefix + str(self.yday), "c")
        except dbm.ndbm.error as e:
            if "Resource temporarily unavailable" not in str(e):
                raise
def get_seed(self) -> None:
"""Get the day's seed."""
try:
with urllib.request.urlopen(SEED_PATH) as handle:
content = handle.read()
except Exception as e: # noqa: BLE001
print("Error retrieving seed:", e)
sys.exit(1)
_, seed = content.split(b"=")
seed = seed.rstrip()
self.seed = seed
self.yday = time.gmtime()[7]
def hide(self, ip: str) -> str:
"""
Obfuscate an IP address. Each day, trigger a new seed change so
the obfuscation map file is renamed.
"""
# rekey?
if self.yday != time.gmtime()[7]:
self.get_seed()
m = hashlib.md5()
m.update(self.seed + ip.encode("utf-8"))
bin_md5 = m.digest()
return "0.%d.%d.%d" % struct.unpack_from("BBB", bin_md5)
def process_input(self) -> None:
"""
Read input from STDIN. When an IP is hidden, the original and
obfuscated IPs are printed to STDOUT. If an IP is already
obfuscated for the day, it is not printed to STDOUT.
"""
count = 0
line = sys.stdin.readline()
try:
while line:
ips = re.findall(
r"[^\d]?(\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})[^\d]?", line
)
for ip in ips:
if (hidden := self.hide(ip)) not in self.real_ips: # type: ignore [operator]
count += 1
self.real_ips[hidden] = ip
# Every 10th IP, flush the DB to disk
if count % 10 == 0:
self.real_ips.close()
self.set_db()
print(ip, hidden)
line = sys.stdin.readline()
except KeyboardInterrupt:
self.real_ips.close()
def main():
hash_ip = HashIP()
hash_ip.process_input()
if __name__ == "__main__":
main()
| 3,396 | Python | .py | 99 | 25.353535 | 97 | 0.542048 | internetarchive/openlibrary | 5,078 | 1,311 | 956 | AGPL-3.0 | 9/5/2024, 5:07:13 PM (Europe/Amsterdam) |
513 | reveal.py | internetarchive_openlibrary/scripts/obfi/reveal.py | #!/usr/bin/env python3
# this matches obscured ip's and reveals them in ()'s
# use: cat /var/log/lighttpd/current-access.log | reveal
import dbm.ndbm
import re
import sys
import time
class IPRevealer:
"""A class to reveal obscured IP addresses obscured by hide.py."""
def __init__(self, real_ips, replace: bool):
self.real_ips = real_ips
self.replace = replace
def make_real(self, match: re.Match) -> str:
"""Replace the obscured IP with the real IP or append it in parentheses."""
hidden = match.group(2)
if hidden in self.real_ips:
if self.replace:
return match.group(1) + self.real_ips[hidden].decode() + match.group(3)
else:
return (
match.group(1)
+ hidden
+ "("
+ self.real_ips[hidden].decode()
+ ")"
+ match.group(3)
)
else:
return match.group(1) + hidden + match.group(3)
def run(self) -> None:
"""Read lines from STDIN and print any associated revealed IPs."""
line = sys.stdin.readline()
while line:
revealed = re.sub(
r"([^\d]?)(0\.\d{1,3}\.\d{1,3}\.\d{1,3})([^\d]?)",
self.make_real,
line.rstrip(),
)
print(revealed)
sys.stdout.flush()
line = sys.stdin.readline()
def get_real_ips_file_path() -> str:
"""Construct the real IPs file path."""
# real_ips = dbm.open('/var/tmp/hide_ip_map_' + str(time.gmtime()[7]), 'r')
return f"/var/tmp/fast/hide_ip_map_{time.gmtime()[7]!s}"
if __name__ == "__main__":
with dbm.ndbm.open(get_real_ips_file_path(), "r") as real_ips:
replace = len(sys.argv) > 1 and sys.argv[1] == "replace"
ip_revealer = IPRevealer(real_ips, replace)
ip_revealer.run()
| 1,947 | Python | .py | 50 | 28.9 | 87 | 0.535809 | internetarchive/openlibrary | 5,078 | 1,311 | 956 | AGPL-3.0 | 9/5/2024, 5:07:13 PM (Europe/Amsterdam) |
514 | hide.py | internetarchive_openlibrary/scripts/obfi/hide.py | #!/usr/bin/env python3
# use: hide ip#
# prints hashed ip using current key
import hashlib
import os
import struct
import sys
from typing import Final
import urllib.request
SEED_PATH: Final = os.getenv("SEED_PATH", "")
if not SEED_PATH:
print("Set $SEED_PATH to the URL of seed.txt")
sys.exit(1)
class HashIP:
"""
    A class to hash an IP address based on a seed that changes once per day.
"""
def __init__(self) -> None:
self.seed = b""
self.get_seed()
def get_seed(self) -> None:
"""Get the day's seed."""
try:
with urllib.request.urlopen(SEED_PATH) as handle:
content = handle.read()
except Exception as e: # noqa: BLE001
print("Error retrieving seed:", e)
sys.exit(1)
_, seed = content.split(b"=")
seed = seed.rstrip()
self.seed = seed
def hide(self, ip: str) -> str:
"""Obfuscate the IP address"""
m = hashlib.md5()
m.update(self.seed + ip.encode("utf-8"))
bin_md5 = m.digest()
return "0.%d.%d.%d" % struct.unpack_from("BBB", bin_md5)
if __name__ == "__main__":
hash_ip = HashIP()
print(hash_ip.hide(sys.argv[1]))
| 1,228 | Python | .py | 40 | 24.5 | 79 | 0.586735 | internetarchive/openlibrary | 5,078 | 1,311 | 956 | AGPL-3.0 | 9/5/2024, 5:07:13 PM (Europe/Amsterdam) |
515 | setup.py | internetarchive_openlibrary/scripts/solr_builder/setup.py | from pathlib import Path
from setuptools import setup
from Cython.Build import cythonize
setup(
py_modules=['solr_builder'],
ext_modules=cythonize(
str(Path(__file__).parent / "solr_builder" / "solr_builder.py"),
compiler_directives={'language_level': "3"},
),
)
| 292 | Python | .py | 10 | 25.3 | 72 | 0.686833 | internetarchive/openlibrary | 5,078 | 1,311 | 956 | AGPL-3.0 | 9/5/2024, 5:07:13 PM (Europe/Amsterdam) |
516 | Dockerfile.olpython | internetarchive_openlibrary/scripts/solr_builder/Dockerfile.olpython | FROM openlibrary/olbase:latest
ENV PYTHONPATH=/openlibrary:/openlibrary/vendor/infogami
USER root
COPY requirements*.txt ./
RUN pip install -r requirements_test.txt
WORKDIR /openlibrary/scripts/solr_builder
RUN pip install \
# For real-time profiling
cprofilev \
# Faster python
Cython==3.0.6
# Build cython files
COPY . /openlibrary
RUN ./build-cython.sh
EXPOSE 4000
| 390 | Python | .py | 15 | 23.533333 | 56 | 0.791328 | internetarchive/openlibrary | 5,078 | 1,311 | 956 | AGPL-3.0 | 9/5/2024, 5:07:13 PM (Europe/Amsterdam) |
517 | test_fn_to_cli.py | internetarchive_openlibrary/scripts/solr_builder/tests/test_fn_to_cli.py | from argparse import BooleanOptionalAction
from pathlib import Path
import typing
from scripts.solr_builder.solr_builder.fn_to_cli import FnToCLI
class TestFnToCLI:
def test_full_flow(self):
def fn(works: list[str], solr_url: str | None = None):
"""
Do some magic!
:param works: These are works
:param solr_url: This is solr url
"""
cli = FnToCLI(fn)
assert cli.parser.description.strip() == 'Do some magic!'
assert '--solr-url' in cli.parser.format_usage()
def test_parse_docs(self):
docs = """
:param a: A
:param b: B
:param c: C
"""
assert FnToCLI.parse_docs(docs) == {'a': 'A', 'b': 'B', 'c': 'C'}
docs = """
Some function description
:param a: A asdfas
"""
assert FnToCLI.parse_docs(docs) == {'a': 'A asdfas'}
def test_type_to_argparse(self):
assert FnToCLI.type_to_argparse(int) == {'type': int}
assert FnToCLI.type_to_argparse(typing.Optional[int]) == { # noqa: UP007
'type': int
}
assert FnToCLI.type_to_argparse(bool) == {
'type': bool,
'action': BooleanOptionalAction,
}
assert FnToCLI.type_to_argparse(typing.Literal['a', 'b']) == {
'choices': ('a', 'b'),
}
def test_is_optional(self):
assert FnToCLI.is_optional(typing.Optional[int]) # noqa: UP007
assert not FnToCLI.is_optional(int)
def test_lists(self):
def fn(nums: list[int]):
return sum(nums)
cli = FnToCLI(fn)
cli.parse_args(['1', '2', '3'])
assert cli.run() == 6
def test_paths(self):
def fn(files: list[Path] | None = None):
if not files:
return None
return [isinstance(f, Path) for f in files]
cli = FnToCLI(fn)
cli.parse_args(['--files', 'path1', 'path2'])
assert cli.run() == [True, True]
cli.parse_args([])
assert cli.run() is None
| 2,077 | Python | .py | 58 | 26.724138 | 81 | 0.548355 | internetarchive/openlibrary | 5,078 | 1,311 | 956 | AGPL-3.0 | 9/5/2024, 5:07:13 PM (Europe/Amsterdam) |
518 | solr_builder.py | internetarchive_openlibrary/scripts/solr_builder/solr_builder/solr_builder.py | from __future__ import annotations
import json
import logging
from pathlib import Path
import time
import uuid
from collections import namedtuple
from collections.abc import Awaitable, Iterator
from configparser import ConfigParser
from typing import Any, Literal, Self
import aiofiles
import psycopg2
from openlibrary.core.bookshelves import Bookshelves
from openlibrary.core.ratings import Ratings, WorkRatingsSummary
from openlibrary.solr import update
from openlibrary.solr.data_provider import DataProvider, WorkReadingLogSolrSummary
from openlibrary.solr.update import load_configs, update_keys
from openlibrary.utils.open_syllabus_project import set_osp_dump_location
logger = logging.getLogger("openlibrary.solr-builder")
def config_section_to_dict(config_file: str, section: str) -> dict:
"""
Read a config file's section as a dict
:param str config_file: filename of config file
:param str section: section to pull data from
:return: dict of key value pairs
"""
config = ConfigParser()
config.read(config_file)
result = {key: config.get(section, key) for key in config.options(section)}
return result
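# For instance, given a postgres.ini like (values are placeholders):
#
#     [postgres]
#     host = db
#     dbname = openlibrary
#
# config_section_to_dict('postgres.ini', 'postgres') returns
# {'host': 'db', 'dbname': 'openlibrary'}, which LocalPostgresDataProvider
# passes straight to psycopg2.connect(**...).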
def safeget(func):
"""
>>> safeget(lambda: {}['foo'])
>>> safeget(lambda: {}['foo']['bar'][0])
>>> safeget(lambda: {'foo': []}['foo'][0])
>>> safeget(lambda: {'foo': {'bar': [42]}}['foo']['bar'][0])
42
"""
    try:
        return func()
    except (KeyError, IndexError):
        return None
class LocalPostgresDataProvider(DataProvider):
"""
This class uses a local postgres dump of the database.
"""
def __init__(self, db_conf_file: str):
"""
:param str db_conf_file: file to DB config with [postgres] section
"""
super().__init__()
self._db_conf = config_section_to_dict(db_conf_file, "postgres")
self._conn: psycopg2._psycopg.connection = None
self.cache: dict = {}
self.cached_work_editions_ranges: list = []
self.cached_work_ratings: dict[str, WorkRatingsSummary] = {}
self.cached_work_reading_logs: dict[str, WorkReadingLogSolrSummary] = {}
def __enter__(self) -> Self:
"""
:rtype: LocalPostgresDataProvider
"""
self._conn = psycopg2.connect(**self._db_conf)
return self
def __exit__(self, type, value, traceback):
self.clear_cache()
self._conn.close()
def query_all(self, query: str, json_cache: dict | None = None) -> list:
"""
:param json_cache: if specified, adds records in the second column to the
provided dict, with the first column as keys
"""
cur = self._conn.cursor()
cur.execute(query)
rows = cur.fetchall()
cur.close()
if rows:
if json_cache is not None:
json_cache.update({row[0]: row[1] for row in rows})
return rows
else:
return []
def query_iter(self, query, size=20):
cur = self._conn.cursor()
cur.execute(query)
while True:
rows = cur.fetchmany(size)
if not rows:
break
yield from rows
cur.close()
def query_batched(
self,
query: str,
size: int,
cursor_name: str | None = None,
cache_json: bool = False,
) -> Iterator:
"""
:param str query:
:param int size:
:param str or None cursor_name: if wanting to use a specific cursor
:param bool cache_json: Requires the select statement to be "Key", "JSON"
        :return: Iterator over batches of rows
"""
# Not sure if this name needs to be unique
cursor_name = (
cursor_name or 'solr_builder_server_side_cursor_' + uuid.uuid4().hex
)
cur = self._conn.cursor(name=cursor_name)
cur.itersize = size
cur.execute(query)
while True:
rows = cur.fetchmany(size)
if not rows:
break
else:
if cache_json:
self.cache.update({row[0]: row[1] for row in rows})
yield rows
cur.close()
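    # Usage sketch for query_batched (query and sizes are illustrative):
    #
    #     q = 'SELECT "Key", "JSON" FROM test'
    #     for rows in provider.query_batched(q, size=1000, cache_json=True):
    #         keys = [key for key, _ in rows]
    #         ...  # self.cache now also maps each "Key" to its "JSON"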
def cache_edition_works(self, lo_key, hi_key):
q = f"""
SELECT works."Key", works."JSON"
FROM "test" editions
INNER JOIN test works
ON editions."JSON" -> 'works' -> 0 ->> 'key' = works."Key"
WHERE editions."Type" = '/type/edition'
AND '{lo_key}' <= editions."Key" AND editions."Key" <= '{hi_key}'
"""
self.query_all(q, json_cache=self.cache)
def cache_work_editions(self, lo_key, hi_key):
q = f"""
SELECT "Key", "JSON"
FROM "test"
WHERE "Type" = '/type/edition'
AND '{lo_key}' <= "JSON" -> 'works' -> 0 ->> 'key'
AND "JSON" -> 'works' -> 0 ->> 'key' <= '{hi_key}'
"""
self.query_all(q, json_cache=self.cache)
self.cached_work_editions_ranges.append((lo_key, hi_key))
def cache_edition_authors(self, lo_key, hi_key):
q = f"""
SELECT authors."Key", authors."JSON"
FROM "test" editions
INNER JOIN test works
ON editions."JSON" -> 'works' -> 0 ->> 'key' = works."Key"
INNER JOIN test authors
ON works."JSON" -> 'authors' -> 0 -> 'author' ->> 'key' = authors."Key"
WHERE editions."Type" = '/type/edition'
AND editions."JSON" -> 'works' -> 0 ->> 'key' IS NULL
AND '{lo_key}' <= editions."Key" AND editions."Key" <= '{hi_key}'
"""
self.query_all(q, json_cache=self.cache)
def cache_work_authors(self, lo_key, hi_key):
        # Cache up to the first five authors
q = f"""
SELECT authors."Key", authors."JSON"
FROM "test" works
INNER JOIN "test" authors ON (
works."JSON" -> 'authors' -> 0 -> 'author' ->> 'key' = authors."Key" OR
works."JSON" -> 'authors' -> 1 -> 'author' ->> 'key' = authors."Key" OR
works."JSON" -> 'authors' -> 2 -> 'author' ->> 'key' = authors."Key" OR
works."JSON" -> 'authors' -> 3 -> 'author' ->> 'key' = authors."Key" OR
works."JSON" -> 'authors' -> 4 -> 'author' ->> 'key' = authors."Key"
)
WHERE works."Type" = '/type/work'
AND '{lo_key}' <= works."Key" AND works."Key" <= '{hi_key}'
"""
self.query_all(q, json_cache=self.cache)
def cache_work_ratings(self, lo_key, hi_key):
q = f"""
SELECT "WorkKey", json_build_object(
'ratings_count_1', count(*) filter (where "Rating" = 1),
'ratings_count_2', count(*) filter (where "Rating" = 2),
'ratings_count_3', count(*) filter (where "Rating" = 3),
'ratings_count_4', count(*) filter (where "Rating" = 4),
'ratings_count_5', count(*) filter (where "Rating" = 5)
)
FROM "ratings"
WHERE '{lo_key}' <= "WorkKey" AND "WorkKey" <= '{hi_key}'
GROUP BY "WorkKey"
ORDER BY "WorkKey" asc
"""
self.query_all(q, json_cache=self.cached_work_ratings)
for row in self.cached_work_ratings.values():
row.update(
Ratings.work_ratings_summary_from_counts(
[row[f'ratings_count_{i}'] for i in range(1, 6)]
)
)
def cache_work_reading_logs(self, lo_key: str, hi_key: str):
per_shelf_fields = ', '.join(
f"""
'{json_name}_count', count(*) filter (where "Shelf" = '{human_name}')
""".strip()
for json_name, human_name in zip(
Bookshelves.PRESET_BOOKSHELVES_JSON.keys(),
Bookshelves.PRESET_BOOKSHELVES.keys(),
)
)
q = f"""
SELECT "WorkKey", json_build_object(
'readinglog_count', count(*),
{per_shelf_fields}
)
FROM "reading_log"
WHERE '{lo_key}' <= "WorkKey" AND "WorkKey" <= '{hi_key}'
GROUP BY "WorkKey"
ORDER BY "WorkKey" asc
"""
self.query_all(q, json_cache=self.cached_work_reading_logs)
async def cache_cached_editions_ia_metadata(self):
ocaids = list({doc['ocaid'] for doc in self.cache.values() if 'ocaid' in doc})
await self.preload_metadata(ocaids)
def find_redirects(self, key):
"""Returns keys of all things which redirect to this one."""
logger.debug("find_redirects %s", key)
q = (
"""
SELECT "Key" FROM test
WHERE "Type" = '/type/redirect' AND "JSON" ->> 'location' = '%s'
"""
% key
)
return [r[0] for r in self.query_iter(q)]
def get_editions_of_work_direct(self, work):
q = (
"""
SELECT "JSON" FROM test
WHERE "Type" = '/type/edition' AND "JSON" -> 'works' -> 0 ->> 'key' = '%s'
"""
% work['key']
)
return [r[0] for r in self.query_iter(q)]
def get_editions_of_work(self, work):
# They should all be cached...
cache_hit = any(
lo <= work['key'] <= hi for (lo, hi) in self.cached_work_editions_ranges
)
if cache_hit:
return [
doc
for doc in self.cache.values()
if (
doc['type']['key'] == '/type/edition'
and safeget(lambda: doc['works'][0]['key'] == work['key'])
)
]
else:
return self.get_editions_of_work_direct(work)
def get_work_ratings(self, work_key: str) -> WorkRatingsSummary | None:
return self.cached_work_ratings.get(work_key)
def get_work_reading_log(self, work_key: str) -> WorkReadingLogSolrSummary | None:
return self.cached_work_reading_logs.get(work_key)
async def get_document(self, key):
if key in self.cache:
logger.debug("get_document cache hit %s", key)
return self.cache[key]
logger.debug("get_document cache miss %s", key)
q = (
"""
SELECT "JSON" FROM test
WHERE "Key" = '%s'
"""
% key
)
row = next(self.query_iter(q))
if row:
return row[0]
def clear_cache(self):
super().clear_cache()
self.cached_work_editions_ranges.clear()
self.cached_work_ratings.clear()
self.cache.clear()
def simple_timeit(fn):
start = time.time()
result = fn()
end = time.time()
return end - start, result
async def simple_timeit_async(awaitable: Awaitable):
start = time.time()
result = await awaitable
end = time.time()
return end - start, result
def build_job_query(
job: Literal['works', 'orphans', 'authors', 'lists'],
start_at: str | None = None,
offset: int = 0,
last_modified: str | None = None,
limit: int | None = None,
) -> str:
"""
:param job: job to complete
:param start_at: key (type-prefixed) to start from as opposed to offset; WAY more
efficient since offset has to walk through all `offset` rows.
:param offset: Use `start_at` if possible.
:param last_modified: Only import docs modified after this date.
"""
type = {
"works": "work",
"orphans": "edition",
"authors": "author",
"lists": "list",
}[job]
q_select = """SELECT "Key", "JSON" FROM test"""
q_where = """WHERE "Type" = '/type/%s'""" % type
q_order = """ORDER BY "Key" """
q_offset = ""
q_limit = ""
if offset:
q_offset = """OFFSET %d""" % offset
if limit:
q_limit = """LIMIT %d""" % limit
if last_modified:
q_where += """ AND "LastModified" >= '%s'""" % last_modified
q_order = ""
q_limit = ""
if start_at:
q_where += """ AND "Key" >= '%s'""" % start_at
if job == 'orphans':
q_where += """ AND "JSON" -> 'works' -> 0 ->> 'key' IS NULL"""
return f"{q_select} {q_where} {q_order} {q_offset} {q_limit}"
async def main(
cmd: Literal['index', 'fetch-end'],
job: Literal['works', 'orphans', 'authors', 'lists'],
osp_dump: Path | None = None,
postgres="postgres.ini",
ol="http://ol/",
ol_config="../../conf/openlibrary.yml",
solr: str | None = None,
skip_solr_id_check: bool = True,
start_at: str | None = None,
offset=0,
limit=1,
last_modified: str | None = None,
progress: str | None = None,
log_file: str | None = None,
log_level=logging.INFO,
dry_run: bool = False,
) -> None:
"""
:param cmd: Whether to do the index or just fetch end of the chunk
:param job: Type to index. Orphans gets orphaned editions.
:param postgres: Path to postgres config file
:param ol: Open Library endpoint
:param ol_config: Path to Open Library config file
:param solr: Overwrite solr base url from ol_config
:param start_at: key (type-prefixed) to start from as opposed to offset; WAY more
efficient since offset has to walk through all `offset` rows.
:param offset: Use `start_at` if possible.
    :param last_modified: Limit results to those modified >= this date
:param progress: Where/if to save progress indicator to
:param log_file: Redirect logs to file instead of stdout
"""
logging.basicConfig(
filename=log_file,
level=log_level,
format="%(asctime)s [%(levelname)s] %(message)s",
)
if solr:
update.set_solr_base_url(solr)
set_osp_dump_location(osp_dump)
PLogEntry = namedtuple(
'PLogEntry',
[
'seen',
'total',
'percent',
'elapsed',
'q_1',
'q_auth',
'q_ia',
'cached',
'ia_cache',
'next',
],
)
class PLog:
def __init__(self, filename):
"""
:param str or None filename:
"""
self.filename = filename
self.last_entry = None
def log(self, entry):
"""
:param PLogEntry entry:
"""
self.last_entry = entry
if self.filename:
                with open(self.filename, 'a') as f:
f.write(
'\t'.join(
self.fmt(k, val) for k, val in entry._asdict().items()
)
)
f.write('\n')
def update(
self,
seen: str | int | None = None,
total: str | int | None = None,
percent: str | float | None = None,
elapsed: str | float | None = None,
q_1: str | float | None = None,
q_auth: str | float | None = None,
cached: str | int | None = None,
q_ia: str | float | None = None,
ia_cache: str | int | None = None,
next: str | None = None,
) -> None:
"""
:param str or int or None seen:
:param str or int or None total:
:param str or float or None percent:
:param str or float or None elapsed:
:param str or float or None q_1:
:param str or float or None q_auth:
:param str or int or None cached:
:param str or float or None q_ia:
:param str or int or None ia_cache:
:param str or None next:
:return: None
"""
args = locals()
entry = self.last_entry._replace(
**{f: args[f] for f in PLogEntry._fields if args[f] is not None}
)
self.log(entry)
def fmt(self, k: str, val: Any) -> str:
"""
:param str k:
:param Any val:
:return: str
"""
if val is None:
return '?'
if isinstance(val, str):
return val
if k == 'percent':
return '%.2f%%' % (100 * val)
if k in ['elapsed', 'q_1', 'q_auth', 'q_ia']:
return '%.2fs' % val
if isinstance(val, float):
return '%.2f' % val
if k == 'next':
return val.split('/')[-1]
return str(val)
plog = PLog(progress)
# load the contents of the config?
with LocalPostgresDataProvider(postgres) as db:
# Check to see where we should be starting from
if cmd == 'fetch-end':
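            # Note: `limit` is passed as the offset here, seemingly on
            # purpose: the query returns the key `limit` rows past
            # `start_at`, i.e. where the next chunk would begin.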
next_start_query = build_job_query(job, start_at, limit, last_modified, 1)
next_start_results = db.query_all(next_start_query)
if next_start_results:
print(next_start_results[0][0])
return
logger.info(
json.dumps(
{
'scope': 'solr_builder::main',
'event': 'Indexing started',
'start_at': start_at,
}
)
)
load_configs(ol, ol_config, db)
q = build_job_query(job, start_at, offset, last_modified, limit)
if progress:
# Clear the file
async with aiofiles.open(progress, 'w') as f:
await f.write('')
async with aiofiles.open(progress, 'a') as f:
await f.write('Calculating total... ')
start = time.time()
q_count = """SELECT COUNT(*) FROM(%s) AS foo""" % q
count = db.query_all(q_count)[0][0]
end = time.time()
if progress:
async with aiofiles.open(progress, 'a') as f:
await f.write('%d (%.2fs)\n' % (count, end - start))
await f.write('\t'.join(PLogEntry._fields) + '\n')
plog.log(
PLogEntry(0, count, '0.00%', 0, '?', '?', '?', '?', '?', start_at or '?')
)
plog.update(q_1=0, q_auth=0, q_ia=0)
start = time.time()
seen = 0
for batch in db.query_batched(q, size=1000, cache_json=True):
keys = [x[0] for x in batch]
plog.update(next=keys[0], cached=len(db.cache), ia_cache=0)
with LocalPostgresDataProvider(postgres) as db2:
key_range = [keys[0], keys[-1]]
if job == "works":
# cache editions
editions_time, _ = simple_timeit(
lambda: db2.cache_work_editions(*key_range)
)
plog.update(
q_1=plog.last_entry.q_1 + editions_time,
cached=len(db.cache) + len(db2.cache),
)
# cache editions' ocaid metadata
ocaids_time, _ = await simple_timeit_async(
db2.cache_cached_editions_ia_metadata()
)
plog.update(
q_ia=plog.last_entry.q_ia + ocaids_time,
ia_cache=len(db2.ia_cache),
)
# cache authors
authors_time, _ = simple_timeit(
lambda: db2.cache_work_authors(*key_range)
)
plog.update(
q_auth=plog.last_entry.q_auth + authors_time,
cached=len(db.cache) + len(db2.cache),
)
# cache ratings and reading logs
db2.cache_work_ratings(*key_range)
db2.cache_work_reading_logs(*key_range)
elif job == "orphans":
# cache editions' ocaid metadata
ocaids_time, _ = await simple_timeit_async(
db2.cache_cached_editions_ia_metadata()
)
plog.update(
q_ia=plog.last_entry.q_ia + ocaids_time,
ia_cache=len(db2.ia_cache),
)
# cache authors
authors_time, _ = simple_timeit(
lambda: db2.cache_edition_authors(*key_range)
)
plog.update(
q_auth=plog.last_entry.q_auth + authors_time,
cached=len(db.cache) + len(db2.cache),
)
elif job == 'lists':
                    # Nothing to cache; just need the lists themselves and
# they are already cached
pass
elif job == "authors":
                    # Nothing to cache; update.py queries solr directly for
                    # each author, and provides no way to cache.
pass
# Store in main cache
db.cache.update(db2.cache)
db.ia_cache.update(db2.ia_cache)
db.cached_work_editions_ranges += db2.cached_work_editions_ranges
db.cached_work_ratings.update(db2.cached_work_ratings)
db.cached_work_reading_logs.update(db2.cached_work_reading_logs)
await update_keys(
keys,
commit=False,
skip_id_check=skip_solr_id_check,
update='quiet' if dry_run else 'update',
)
seen += len(keys)
plog.update(
elapsed=time.time() - start,
seen=seen,
percent=seen / count,
cached=len(db.cache),
ia_cache=len(db.ia_cache),
)
db.clear_cache()
if __name__ == '__main__':
from scripts.solr_builder.solr_builder.fn_to_cli import FnToCLI
FnToCLI(main).run()
| 22,017 | Python | .py | 572 | 27.215035 | 87 | 0.516173 | internetarchive/openlibrary | 5,078 | 1,311 | 956 | AGPL-3.0 | 9/5/2024, 5:07:13 PM (Europe/Amsterdam) |
519 | fn_to_cli.py | internetarchive_openlibrary/scripts/solr_builder/solr_builder/fn_to_cli.py | import asyncio
from pathlib import Path
import types
import typing
from argparse import (
ArgumentParser,
ArgumentDefaultsHelpFormatter,
BooleanOptionalAction,
Namespace,
)
class FnToCLI:
"""
A utility class which automatically infers and generates ArgParse command
line options from a function based on defaults/type annotations
This is _very_ basic; supports:
* Args of int, str types (same logic as default argparse)
* Args of bool type (Uses argparse BooleanOptionalAction)
* eg `do_blah=False` becomes `--do-blah, --no-do-blah`
* Args of typing.Optional (or anything with a default)
* Args of typing.Literal (uses argparse choices)
* eg `color: Literal['red, 'black']` becomes `--color red|black` (with docs)
* Type deduction of default values
* Supports async functions automatically
* Includes docstring if it's in `:param my_arg: Description of my arg` format
Anything else will likely error :)
Example:
if __name__ == '__main__':
FnToCLI(my_func).run()
"""
def __init__(self, fn: typing.Callable):
self.fn = fn
arg_names = fn.__code__.co_varnames[: fn.__code__.co_argcount]
annotations = typing.get_type_hints(fn)
defaults: list = fn.__defaults__ or [] # type: ignore[assignment]
num_required = len(arg_names) - len(defaults)
default_args = arg_names[num_required:]
defaults: dict = { # type: ignore[no-redef]
arg: default for [arg, default] in zip(default_args, defaults)
}
docs = fn.__doc__ or ''
arg_docs = self.parse_docs(docs)
self.parser = ArgumentParser(
description=docs.split(':param', 1)[0],
formatter_class=ArgumentDefaultsHelpFormatter,
)
self.args: Namespace | None = None
for arg in arg_names:
optional = arg in defaults
cli_name = arg.replace('_', '-')
if arg in annotations:
arg_opts = self.type_to_argparse(annotations[arg])
elif arg in defaults:
arg_opts = self.type_to_argparse(type(defaults[arg])) # type: ignore[call-overload]
else:
raise ValueError(f'{arg} has no type information')
# Help needs to always be defined, or it won't show the default :/
arg_opts['help'] = arg_docs.get(arg) or '-'
if optional:
opt_name = f'--{cli_name}' if len(cli_name) > 1 else f'-{cli_name}'
self.parser.add_argument(opt_name, default=defaults[arg], **arg_opts) # type: ignore[call-overload]
else:
self.parser.add_argument(cli_name, **arg_opts)
def parse_args(self, args: typing.Sequence[str] | None = None):
self.args = self.parser.parse_args(args)
return self.args
def args_dict(self):
if not self.args:
self.parse_args()
return {k.replace('-', '_'): v for k, v in self.args.__dict__.items()}
def run(self):
args_dicts = self.args_dict()
if asyncio.iscoroutinefunction(self.fn):
return asyncio.run(self.fn(**args_dicts))
else:
return self.fn(**args_dicts)
@staticmethod
def parse_docs(docs):
params = docs.strip().split(':param ')[1:]
params = [p.strip() for p in params]
params = [p.split(':', 1) for p in params if p]
return {name: docs.strip() for [name, docs] in params}
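    # e.g. parse_docs(":param a: A\n:param b: B") -> {'a': 'A', 'b': 'B'}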
@staticmethod
def type_to_argparse(typ: type) -> dict:
if FnToCLI.is_optional(typ):
return FnToCLI.type_to_argparse(
                next(t for t in typing.get_args(typ) if t is not type(None))
)
if typ is bool:
return {'type': typ, 'action': BooleanOptionalAction}
simple_types = (int, str, float, Path)
if typ in simple_types:
return {'type': typ}
if typing.get_origin(typ) is list:
subtype = typing.get_args(typ)[0]
if subtype in simple_types:
return {'nargs': '*', 'type': subtype}
if typing.get_origin(typ) == typing.Literal:
return {'choices': typing.get_args(typ)}
raise ValueError(f'Unsupported type: {typ}')
@staticmethod
def is_optional(typ: type) -> bool:
return (
(typing.get_origin(typ) is typing.Union or isinstance(typ, types.UnionType))
and type(None) in typing.get_args(typ)
and len(typing.get_args(typ)) == 2
)
if __name__ == '__main__':
def fn(nums: list[int]):
print(sum(nums))
cli = FnToCLI(fn)
cli.run()
| 4,699 | Python | .py | 111 | 33.369369 | 116 | 0.598817 | internetarchive/openlibrary | 5,078 | 1,311 | 956 | AGPL-3.0 | 9/5/2024, 5:07:13 PM (Europe/Amsterdam) |
520 | index_subjects.py | internetarchive_openlibrary/scripts/solr_builder/solr_builder/index_subjects.py | import asyncio
import json
from asyncio import Future
from typing import Literal
import httpx
from openlibrary.solr.utils import solr_insert_documents, str_to_key
from scripts.solr_builder.solr_builder.fn_to_cli import FnToCLI
from scripts.solr_builder.solr_builder.solr_builder import safeget
def subject_name_to_key(
subject_type: Literal['subject', 'person', 'place', 'time'], subject_name: str
) -> str:
escaped_subject_name = str_to_key(subject_name)
if subject_type == 'subject':
return f"/subjects/{escaped_subject_name}"
else:
return f"/subjects/{subject_type}:{escaped_subject_name}"
def build_subject_doc(
subject_type: Literal['subject', 'person', 'place', 'time'],
subject_name: str,
work_count: int,
):
"""Build the `type:subject` solr doc for this subject."""
return {
'key': subject_name_to_key(subject_type, subject_name),
'name': subject_name,
'type': 'subject',
'subject_type': subject_type,
'work_count': work_count,
}
async def index_subjects(
subject_type: Literal['subject', 'person', 'place', 'time'],
offset=0,
limit=1,
solr_base_url='http://solr:8983/solr/openlibrary',
skip_id_check=False,
):
"""
    :return: Returns the number of subject docs added
"""
print(json.dumps({'event': 'starting', 'offset': offset}))
async with httpx.AsyncClient() as client:
resp = (
await client.get(
f'{solr_base_url}/select',
# Can be slow since we require such a large facet in a chunk
timeout=180,
params={
'q': 'type:work',
'rows': 0,
'facet': 'true',
'facet.field': f'{subject_type}_facet',
'facet.limit': limit,
'facet.offset': offset,
'facet.sort': 'index',
'facet.mincount': 1,
'wt': 'json',
'json.nl': 'arrarr',
},
)
).json()
facets = resp['facet_counts']['facet_fields'][f'{subject_type}_facet']
docs = [
build_subject_doc(subject_type, subject_name, work_count)
for [subject_name, work_count] in facets
]
await solr_insert_documents(
docs,
solr_base_url=solr_base_url,
skip_id_check=skip_id_check,
)
print(
json.dumps(
{
'event': 'completed',
'offset': offset,
'count': len(docs),
'first': safeget(lambda: facets[0][0]),
}
)
)
return len(docs)
async def index_all_subjects(
subject_type: Literal['subject', 'person', 'place', 'time'],
chunk_size=10_000,
instances=2,
solr_base_url='http://solr:8983/solr/openlibrary',
skip_id_check=False,
):
done = False
active_workers: set[Future] = set()
offset = 0
while True:
if done:
# Done! Wait for any previous workers that are still going
await asyncio.gather(*active_workers)
break
elif len(active_workers) >= instances:
# Too many running; wait for one to finish
finished, pending = await asyncio.wait(
active_workers, return_when=asyncio.FIRST_COMPLETED
)
active_workers = pending
done = any(task.result() < chunk_size for task in finished)
else:
# Can start another worker
task = asyncio.create_task(
index_subjects(
subject_type,
offset=offset,
limit=chunk_size,
solr_base_url=solr_base_url,
skip_id_check=skip_id_check,
)
)
active_workers.add(task)
offset += chunk_size
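# The loop above is a simple bounded worker pool: at most `instances` chunks
# are in flight at once, and any chunk that comes back with fewer than
# `chunk_size` docs signals that the facet list is exhausted.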
if __name__ == '__main__':
cli = FnToCLI(index_all_subjects)
print(cli.parse_args())
cli.run()
| 4,039 | Python | .py | 120 | 24.15 | 82 | 0.554019 | internetarchive/openlibrary | 5,078 | 1,311 | 956 | AGPL-3.0 | 9/5/2024, 5:07:13 PM (Europe/Amsterdam) |
521 | sitemap.py | internetarchive_openlibrary/scripts/sitemaps/sitemap.py | #!/usr/bin/env python
"""Script to generate XML sitemap of openlibrary.org website.
USAGE:
python sitemaps.py suffix dump.txt.gz
"""
import gzip
import itertools
import json
import logging
import os
import re
import sys
from collections.abc import Iterator
from contextlib import contextmanager
from datetime import datetime
from time import perf_counter
import web
t_sitemap = """$def with (things)
<?xml version="1.0" encoding="UTF-8"?>
<urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9">
$for t in things:
<url>
<loc>https://openlibrary.org$t.path</loc>
<lastmod>$t.last_modified</lastmod>
</url>
</urlset>
"""
t_siteindex = """$def with (names, timestamp)
<?xml version="1.0" encoding="UTF-8"?>
<sitemapindex xmlns="http://www.sitemaps.org/schemas/sitemap/0.9">
$for name in names:
<sitemap>
<loc>https://openlibrary.org/static/sitemaps/$name</loc>
<lastmod>$timestamp</lastmod>
</sitemap>
</sitemapindex>
"""
logger = logging.getLogger(__file__)
logger.setLevel(logging.DEBUG)
sitemap = web.template.Template(t_sitemap)
siteindex = web.template.Template(t_siteindex)
def log(*args) -> None:
args_str = " ".join(str(a) for a in args)
msg = f"{datetime.now():%Y-%m-%d %H:%M:%S} [openlibrary.dump] {args_str}"
logger.info(msg)
print(msg, file=sys.stderr)
@contextmanager
def elapsed_time(name: str = "elapsed_time"):
"""
Two ways to use elapsed_time():
1. As a decorator to time the execution of an entire function:
@elapsed_time("my_slow_function")
def my_slow_function(n=10_000_000):
pass
2. As a context manager to time the execution of a block of code inside a function:
with elapsed_time("my_slow_block_of_code"):
pass
"""
start = perf_counter()
yield
log(f"Elapsed time ({name}): {perf_counter() - start:0.8} seconds")
def xopen(filename: str):
    # Open in binary mode in both cases so callers can uniformly .decode()
    return gzip.open(filename) if filename.endswith(".gz") else open(filename, "rb")
def urlsafe(name: str) -> str:
"""Slugifies the name to produce OL url slugs
XXX This is duplicated from openlibrary.core.helpers because there
isn't a great way to import the methods from openlibrary as a
package
"""
# unsafe chars according to RFC 2396
reserved = ";/?:@&=+$,"
delims = '<>#%"'
unwise = "{}|\\^[]`"
space = ' \n\r'
unsafe = reserved + delims + unwise + space
pattern = f"[{''.join(re.escape(c) for c in unsafe)}]+"
safepath_re = re.compile(pattern)
return safepath_re.sub('_', name).replace(' ', '-').strip('_')[:100]
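# e.g. urlsafe("Tom & Jerry?") -> 'Tom_Jerry' and
# urlsafe("A Tale of Two Cities") -> 'A_Tale_of_Two_Cities' (runs of unsafe
# characters, including spaces, collapse to a single underscore).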
@elapsed_time("process_dump")
def process_dump(
dumpfile: str, *, verbose: bool = False
) -> Iterator[tuple[str, str, str]]:
"""Generates a summary file used to generate sitemaps.
The summary file contains: sort-key, path and last_modified columns.
"""
rows = (line.decode().strip().split("\t") for line in xopen(dumpfile))
yield_count = 0
for i, (type, key, revision, last_modified, jsontext) in enumerate(rows, 1):
if type not in ('/type/work', '/type/author'):
continue
doc = json.loads(jsontext)
name_or_title = 'name' if type == '/type/author' else 'title'
title = doc.get(name_or_title, '')
path = f"{key}/{urlsafe(title.strip())}"
last_modified = f"{last_modified.replace(' ', 'T')}Z"
if sortkey := get_sort_key(key):
yield (sortkey, path, last_modified)
yield_count += 1
if verbose and yield_count % 500_000 == 0:
log(f"{i:,} records with {yield_count:,} yielded ({yield_count / i:.2f}%)")
log(
"process_dump complete: "
f"{i:,} records with {yield_count:,} yielded ({yield_count / i:.2f}%)"
)
re_key = re.compile(r"^/(authors|works)/OL\d+[AMW]$")
def get_sort_key(key: str) -> str | None:
"""Returns a sort key used to group urls in 10K batches.
>>> get_sort_key("/authors/OL123456A")
'authors_0012'
"""
if m := re_key.match(key):
return f"{m.group(1)}_{int(web.numify(key)) // 10000:04}"
return None
@elapsed_time("generate_sitemaps")
def generate_sitemaps(filename: str) -> None:
rows = (line.strip().split("\t") for line in open(filename))
for sortkey, chunk in itertools.groupby(rows, lambda row: row[0]):
things = []
_chunk = list(chunk)
for segment in _chunk:
sortkey = segment.pop(0)
last_modified = segment.pop(-1)
path = ''.join(segment)
things.append(web.storage(path=path, last_modified=last_modified))
if things:
write(f"sitemaps/sitemap_{sortkey}.xml.gz", sitemap(things))
@elapsed_time("generate_siteindex")
def generate_siteindex() -> None:
filenames = sorted(os.listdir("sitemaps"))
if "siteindex.xml.gz" in filenames:
filenames.remove("siteindex.xml.gz")
timestamp = datetime.utcnow().strftime('%Y-%m-%dT%H:%M:%SZ')
index = siteindex(filenames, timestamp)
write("sitemaps/siteindex.xml.gz", index)
def write(path: str, text: str) -> None:
try:
text = web.safestr(text)
log('writing', path, text.count('\n'))
with gzip.open(path, 'w') as f:
f.write(text.encode())
except Exception as e:
log(f'write fail {e}')
# os.system("gzip " + path)
def write_tsv(path: str, rows: Iterator[tuple[str, str, str]]) -> None:
lines = ("\t".join(row) + "\n" for row in rows)
with open(path, "w") as f:
f.writelines(lines)
def system_memory() -> int:
"""Linux-specific. Returns system memory in MB."""
try:
x = os.popen("cat /proc/meminfo | grep MemTotal | sed 's/[^0-9]//g'").read()
# proc gives memory in KB, converting it to MB
return int(x) // 1024
except OSError:
# default to 1024MB
return 1024
def system(cmd) -> None:
log("executing:", cmd)
if (status := os.system(cmd)) != 0:
raise Exception("%r failed with exit status: %d" % (cmd, status))
@elapsed_time(f"{__file__}.main")
def main(dumpfile: str) -> None:
system("rm -rf sitemaps sitemaps_data.txt*; mkdir sitemaps")
log("processing the dump")
rows = process_dump(dumpfile)
write_tsv("sitemaps_data.txt", rows)
log("sorting sitemaps_data.txt")
    # use half of system memory or 3GB, whichever is smaller
sort_mem = min(system_memory() / 2, 3072)
system("sort -S%dM sitemaps_data.txt > sitemaps_data.txt.sorted" % sort_mem)
log("generating sitemaps")
generate_sitemaps("sitemaps_data.txt.sorted")
generate_siteindex()
log("done")
if __name__ == "__main__":
main(sys.argv[1])
| 6,694 | Python | .py | 176 | 32.556818 | 87 | 0.636251 | internetarchive/openlibrary | 5,078 | 1,311 | 956 | AGPL-3.0 | 9/5/2024, 5:07:13 PM (Europe/Amsterdam) |
522 | Dockerfile.olpython | internetarchive_openlibrary/scripts/solr_builder/Dockerfile.olpython | FROM openlibrary/olbase:latest
ENV PYTHONPATH=/openlibrary:/openlibrary/vendor/infogami
USER root
COPY requirements*.txt ./
RUN pip install -r requirements_test.txt
WORKDIR /openlibrary/scripts/solr_builder
RUN pip install \
# For real-time profiling
cprofilev \
# Faster python
Cython==3.0.6
# Build cython files
COPY . /openlibrary
RUN ./build-cython.sh
EXPOSE 4000
| 390 | Python | .pyt | 15 | 23.533333 | 56 | 0.791328 | internetarchive/openlibrary | 5,078 | 1,311 | 956 | AGPL-3.0 | 9/5/2024, 5:07:13 PM (Europe/Amsterdam) |
523 | attach_debugger.html | internetarchive_openlibrary/openlibrary/templates/admin/attach_debugger.html | $def with (python_version, keys="", error="")
$var title: $_("Attach Debugger")
<div id="contentHead">
$:render_template("admin/menu")
<h1>$_('Attach Debugger on Python %(version_number)s', version_number=python_version)</h1>
</div>
<div id="contentBody">
<div>
$_("Start a debugger on port 3000.")<br/>
</div>
<form
method="POST"
class="olform"
onsubmit="this._start.disabled = true; this._start.textContent = $dumps(_('Waiting for debugger to attach...'));"
>
<div class="formElement">
<div class="formElement collapse">
<button type="submit" class="larger" name="_start">$_("Start")</button>
</div>
</div>
</form>
</div>
| 744 | Python | .tac | 22 | 27.363636 | 121 | 0.582173 | internetarchive/openlibrary | 5,078 | 1,311 | 956 | AGPL-3.0 | 9/5/2024, 5:07:13 PM (Europe/Amsterdam) |
524 | setup.py | res0nat0r_gitosis/setup.py | #!/usr/bin/python
from setuptools import setup, find_packages
import os
def _subdir_contents(path):
for toplevel in os.listdir(path):
toplevel_path = os.path.join(path, toplevel)
if not os.path.isdir(toplevel_path):
continue
for dirpath, dirnames, filenames in os.walk(toplevel_path):
for filename in filenames:
full_path = os.path.join(dirpath, filename)
if not full_path.startswith(path+'/'):
                    raise RuntimeError('path %r escapes %r' % (full_path, path))
yield full_path[len(path)+1:]
def subdir_contents(path):
return list(_subdir_contents(path))
setup(
name = "gitosis",
version = "0.2",
packages = find_packages(),
author = "Tommi Virtanen",
author_email = "[email protected]",
description = "software for hosting git repositories",
long_description = """
Manage git repositories, provide access to them over SSH, with tight
access control and not needing shell accounts.
gitosis aims to make hosting git repos easier and safer. It manages
multiple repositories under one user account, using SSH keys to
identify users. End users do not need shell accounts on the server,
they will talk to one shared account that will not let them run
arbitrary commands.
""".strip(),
license = "GPL",
keywords = "git scm version-control ssh",
url = "http://eagain.net/software/gitosis/",
entry_points = {
'console_scripts': [
'gitosis-serve = gitosis.serve:Main.run',
'gitosis-run-hook = gitosis.run_hook:Main.run',
'gitosis-init = gitosis.init:Main.run',
],
},
package_data = {
# this seems to be the only way to convince setuptools
# to include things recursively
'gitosis.templates': subdir_contents('gitosis/templates'),
},
# templates need to be a real directory, for git init
zip_safe=False,
install_requires=[
        # setuptools 0.6a9 will have a non-executable post-update
        # hook; this will make gitosis-admin settings not update
        # (fixed in 0.6c5, maybe earlier)
'setuptools>=0.6c5',
],
)
| 2,169 | Python | .py | 56 | 31.821429 | 68 | 0.656518 | res0nat0r/gitosis | 1,323 | 382 | 9 | GPL-2.0 | 9/5/2024, 5:07:13 PM (Europe/Amsterdam) |
525 | copyright | res0nat0r_gitosis/debian/copyright | This package was debianized by Tommi Virtanen <[email protected]> on
Tue, 28 Aug 2007 21:32:22 -0700.
It was originally downloaded from http://eagain.net/software/gitosis/
Upstream Author: Tommi Virtanen <[email protected]>
Copyright:
Gitosis -- git repository hosting application
Copyright (C) 2007 Tommi Virtanen
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
On Debian Linux systems, the complete text of the GNU General Public
License can be found in '/usr/share/common-licenses/GPL'.
| 956 | Python | .py | 17 | 52.470588 | 72 | 0.77897 | res0nat0r/gitosis | 1,323 | 382 | 9 | GPL-2.0 | 9/5/2024, 5:07:13 PM (Europe/Amsterdam) |
526 | gitweb.py | res0nat0r_gitosis/gitosis/gitweb.py | """
Generate ``gitweb`` project list based on ``gitosis.conf``.
To plug this into ``gitweb``, you have two choices.
- The global way, edit ``/etc/gitweb.conf`` to say::
$projects_list = "/path/to/your/projects.list";
Note that there can be only one such use of gitweb.
- The local way, create a new config file::
do "/etc/gitweb.conf" if -e "/etc/gitweb.conf";
$projects_list = "/path/to/your/projects.list";
# see ``repositories`` in the ``gitosis`` section
# of ``~/.gitosis.conf``; usually ``~/repositories``
# but you need to expand the tilde here
$projectroot = "/path/to/your/repositories";
Then in your web server, set environment variable ``GITWEB_CONFIG``
to point to this file.
This way allows you to have multiple separate uses of ``gitweb``, and
isolates the changes a bit more nicely. Recommended.
"""
import os, urllib, logging
from ConfigParser import NoSectionError, NoOptionError
from gitosis import util
def _escape_filename(s):
s = s.replace('\\', '\\\\')
s = s.replace('$', '\\$')
s = s.replace('"', '\\"')
return s
def enum_cfg_repos(config):
"""
Enumerates all repositories that have repo sections in the config.
"""
repositories = util.getRepositoryDir(config)
for section in config.sections():
l = section.split(None, 1)
type_ = l.pop(0)
if type_ != 'repo':
continue
if not l:
continue
name, = l
if not os.path.exists(os.path.join(repositories, name)):
subpath = '%s.git' % name
else:
subpath = name
yield (section, name, repositories, subpath)
def generate_project_list_fp(config, fp):
"""
Generate projects list for ``gitweb``.
:param config: configuration to read projects from
:type config: RawConfigParser
:param fp: writable for ``projects.list``
:type fp: (file-like, anything with ``.write(data)``)
"""
log = logging.getLogger('gitosis.gitweb.generate_projects_list')
try:
global_enable = config.getboolean('gitosis', 'gitweb')
except (NoSectionError, NoOptionError):
global_enable = False
for (section, name, topdir, subpath) in enum_cfg_repos(config):
try:
enable = config.getboolean(section, 'gitweb')
except (NoSectionError, NoOptionError):
enable = global_enable
if not enable:
continue
if not os.path.exists(os.path.join(topdir,subpath)):
log.warning(
'Cannot find %(name)r in %(topdir)r'
% dict(name=name,topdir=topdir))
# preserve old behavior, using the original name for
        # completely nonexistent repos:
subpath = name
response = [subpath]
try:
owner = config.get(section, 'owner')
except (NoSectionError, NoOptionError):
pass
else:
try:
username = config.get('user %s' % owner, 'name')
except (NoSectionError, NoOptionError):
pass
else:
response.append(username)
response.append(owner)
line = ' '.join([urllib.quote_plus(s) for s in response])
print >>fp, line
def generate_project_list(config, path):
"""
Generate projects list for ``gitweb``.
:param config: configuration to read projects from
:type config: RawConfigParser
:param path: path to write projects list to
:type path: str
"""
tmp = '%s.%d.tmp' % (path, os.getpid())
f = file(tmp, 'w')
try:
generate_project_list_fp(config=config, fp=f)
finally:
f.close()
os.rename(tmp, path)
def set_descriptions(config):
"""
Set descriptions for gitweb use.
"""
log = logging.getLogger('gitosis.gitweb.set_descriptions')
for (section, name, topdir, subpath) in enum_cfg_repos(config):
try:
description = config.get(section, 'description')
except (NoSectionError, NoOptionError):
continue
if not description:
continue
if not os.path.exists(os.path.join(topdir,subpath)):
log.warning(
'Cannot find %(name)r in %(topdir)r'
% dict(name=name,topdir=topdir))
continue
path = os.path.join(
topdir,
subpath,
'description',
)
tmp = '%s.%d.tmp' % (path, os.getpid())
f = file(tmp, 'w')
try:
print >>f, description
finally:
f.close()
os.rename(tmp, path)
| 4,664 | Python | .py | 130 | 27.707692 | 70 | 0.599422 | res0nat0r/gitosis | 1,323 | 382 | 9 | GPL-2.0 | 9/5/2024, 5:07:13 PM (Europe/Amsterdam) |
527 | util.py | res0nat0r_gitosis/gitosis/util.py | import errno
import os
from ConfigParser import NoSectionError, NoOptionError
def mkdir(*a, **kw):
try:
os.mkdir(*a, **kw)
except OSError, e:
if e.errno == errno.EEXIST:
pass
else:
raise
def getRepositoryDir(config):
repositories = os.path.expanduser('~')
try:
path = config.get('gitosis', 'repositories')
except (NoSectionError, NoOptionError):
repositories = os.path.join(repositories, 'repositories')
else:
repositories = os.path.join(repositories, path)
return repositories
def getGeneratedFilesDir(config):
try:
generated = config.get('gitosis', 'generate-files-in')
except (NoSectionError, NoOptionError):
generated = os.path.expanduser('~/gitosis')
return generated
def getSSHAuthorizedKeysPath(config):
try:
path = config.get('gitosis', 'ssh-authorized-keys-path')
except (NoSectionError, NoOptionError):
path = os.path.expanduser('~/.ssh/authorized_keys')
return path
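# Config keys read above, and their defaults when absent:
#   [gitosis] repositories             -> ~/repositories
#   [gitosis] generate-files-in        -> ~/gitosis
#   [gitosis] ssh-authorized-keys-path -> ~/.ssh/authorized_keys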
| 1,037 | Python | .py | 32 | 26.40625 | 65 | 0.673327 | res0nat0r/gitosis | 1,323 | 382 | 9 | GPL-2.0 | 9/5/2024, 5:07:13 PM (Europe/Amsterdam) |
528 | repository.py | res0nat0r_gitosis/gitosis/repository.py | import stat
import errno
import os
import re
import subprocess
import sys
from pkg_resources import resource_filename
from gitosis import util
class GitError(Exception):
"""git failed"""
def __str__(self):
return '%s: %s' % (self.__doc__, ': '.join(self.args))
class GitInitError(Exception):
"""git init failed"""
def init(
path,
template=None,
_git=None,
):
"""
Create a git repository at C{path} (if missing).
Leading directories of C{path} must exist.
@param path: Path of repository create.
@type path: str
@param template: Template directory, to pass to C{git init}.
@type template: str
"""
if _git is None:
_git = 'git'
if template is None:
template = resource_filename('gitosis.templates', 'default')
util.mkdir(path, 0750)
args = [
_git,
'--git-dir=.',
'init',
]
hooks = []
if template:
args.append('--template=%s' % template)
template_hooks_dir = os.path.join(template, 'hooks')
if os.path.isdir(template_hooks_dir):
hooks = os.listdir(template_hooks_dir)
returncode = subprocess.call(
args=args,
cwd=path,
stdout=sys.stderr,
close_fds=True,
)
if returncode != 0:
raise GitInitError('exit status %d' % returncode)
    hooks_dir = os.path.join(path, 'hooks')
    if not os.path.exists(hooks_dir):
        hooks_dir = os.path.join(path, '.git', 'hooks')
        if not os.path.exists(hooks_dir):
            raise GitInitError('no hooks directory found under %r' % path)
for hook in hooks:
os.chmod(
os.path.join(hooks_dir, hook),
0755)
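# Usage sketch (hypothetical path; leading directories must already exist):
#
#   init(path='/srv/example/repositories/project.git')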
class GitFastImportError(GitError):
"""git fast-import failed"""
pass
def fast_import(
git_dir,
commit_msg,
committer,
files,
parent=None,
):
"""
Create an initial commit.
"""
child = subprocess.Popen(
args=[
'git',
'--git-dir=.',
'fast-import',
'--quiet',
'--date-format=now',
],
cwd=git_dir,
stdin=subprocess.PIPE,
close_fds=True,
)
files = list(files)
for index, (path, content) in enumerate(files):
child.stdin.write("""\
blob
mark :%(mark)d
data %(len)d
%(content)s
""" % dict(
mark=index+1,
len=len(content),
content=content,
))
child.stdin.write("""\
commit refs/heads/master
committer %(committer)s now
data %(commit_msg_len)d
%(commit_msg)s
""" % dict(
committer=committer,
commit_msg_len=len(commit_msg),
commit_msg=commit_msg,
))
if parent is not None:
assert not parent.startswith(':')
child.stdin.write("""\
from %(parent)s
""" % dict(
parent=parent,
))
for index, (path, content) in enumerate(files):
child.stdin.write('M 100644 :%d %s\n' % (index+1, path))
child.stdin.close()
returncode = child.wait()
if returncode != 0:
raise GitFastImportError(
'git fast-import failed', 'exit status %d' % returncode)
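# Usage sketch with made-up values -- creates a root commit containing a
# single file:
#
#   fast_import(
#       git_dir='/srv/example/project.git',
#       commit_msg='initial import',
#       committer='Jane Doe <[email protected]>',
#       files=[('README', 'hello\n')],
#   )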
class GitExportError(GitError):
"""Export failed"""
pass
class GitReadTreeError(GitExportError):
"""git read-tree failed"""
class GitCheckoutIndexError(GitExportError):
"""git checkout-index failed"""
def export(git_dir, path):
try:
os.mkdir(path)
except OSError, e:
if e.errno == errno.EEXIST:
pass
else:
raise
returncode = subprocess.call(
args=[
'git',
'--git-dir=%s' % git_dir,
'read-tree',
'HEAD',
],
close_fds=True,
)
if returncode != 0:
raise GitReadTreeError('exit status %d' % returncode)
# jumping through hoops to be compatible with git versions
# that don't have --work-tree=
env = {}
env.update(os.environ)
env['GIT_WORK_TREE'] = '.'
returncode = subprocess.call(
args=[
'git',
'--git-dir=%s' % os.path.abspath(git_dir),
'checkout-index',
'-a',
'-f',
],
cwd=path,
close_fds=True,
env=env,
)
if returncode != 0:
raise GitCheckoutIndexError('exit status %d' % returncode)
class GitHasInitialCommitError(GitError):
"""Check for initial commit failed"""
class GitRevParseError(GitError):
"""rev-parse failed"""
def has_initial_commit(git_dir):
child = subprocess.Popen(
args=[
'git',
'--git-dir=.',
'rev-parse',
'HEAD',
],
cwd=git_dir,
stdout=subprocess.PIPE,
close_fds=True,
)
got = child.stdout.read()
returncode = child.wait()
if returncode != 0:
raise GitRevParseError('exit status %d' % returncode)
if got == 'HEAD\n':
return False
elif re.match('^[0-9a-f]{40}\n$', got):
return True
else:
raise GitHasInitialCommitError('Unknown git HEAD: %r' % got)
class GitPushMirrorException(GitError):
"""push --mirror failed"""
def mirror(git_dir, remote):
returncode = subprocess.call(
args=[
'git',
'--git-dir=%s' % git_dir,
'push',
'--mirror',
remote,
],
cwd=git_dir,
close_fds=True
)
if returncode != 0:
raise GitPushMirrorException('exit status %d' % returncode) | 5,564 | Python | .py | 208 | 19.403846 | 68 | 0.560948 | res0nat0r/gitosis | 1,323 | 382 | 9 | GPL-2.0 | 9/5/2024, 5:07:13 PM (Europe/Amsterdam) |
529 | ssh.py | res0nat0r_gitosis/gitosis/ssh.py | import os, errno, re
import logging
log = logging.getLogger('gitosis.ssh')
_ACCEPTABLE_USER_RE = re.compile(r'^[a-zA-Z][a-zA-Z0-9_.-]*(@[a-zA-Z][a-zA-Z0-9.-]*)?$')
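# Accepts plain usernames and user@host forms that start with a letter,
# e.g. "jdoe" or "[email protected]"; anything else is rejected.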
def isSafeUsername(user):
match = _ACCEPTABLE_USER_RE.match(user)
return (match is not None)
def readKeys(keydir):
"""
Read SSH public keys from ``keydir/*.pub``
"""
for filename in os.listdir(keydir):
if filename.startswith('.'):
continue
basename, ext = os.path.splitext(filename)
if ext != '.pub':
continue
if not isSafeUsername(basename):
log.warn('Unsafe SSH username in keyfile: %r', filename)
continue
path = os.path.join(keydir, filename)
f = file(path)
for line in f:
line = line.rstrip('\n')
yield (basename, line)
f.close()
COMMENT = '### autogenerated by gitosis, DO NOT EDIT'
def generateAuthorizedKeys(keys):
TEMPLATE=('command="gitosis-serve %(user)s",no-port-forwarding,'
+'no-X11-forwarding,no-agent-forwarding,no-pty %(key)s')
yield COMMENT
for (user, key) in keys:
yield TEMPLATE % dict(user=user, key=key)
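# A generated entry looks like this (user and key are illustrative):
#   command="gitosis-serve jdoe",no-port-forwarding,no-X11-forwarding,no-agent-forwarding,no-pty ssh-rsa AAAA... jdoe@host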
_COMMAND_RE = re.compile(
    '^command="(/[^ "]+/)?gitosis-serve [^"]+",'
    + 'no-port-forwarding,no-X11-forwarding,'
    + 'no-agent-forwarding,no-pty .*')
def filterAuthorizedKeys(fp):
"""
Read lines from ``fp``, filter out autogenerated ones.
Note removes newlines.
"""
for line in fp:
line = line.rstrip('\n')
if line == COMMENT:
continue
if _COMMAND_RE.match(line):
continue
yield line
def writeAuthorizedKeys(path, keydir):
tmp = '%s.%d.tmp' % (path, os.getpid())
try:
in_ = file(path)
except IOError, e:
if e.errno == errno.ENOENT:
in_ = None
else:
raise
try:
out = file(tmp, 'w')
try:
if in_ is not None:
for line in filterAuthorizedKeys(in_):
print >>out, line
keygen = readKeys(keydir)
for line in generateAuthorizedKeys(keygen):
print >>out, line
os.fsync(out)
finally:
out.close()
finally:
if in_ is not None:
in_.close()
os.rename(tmp, path)
| 2,398 | Python | .py | 73 | 24.109589 | 88 | 0.556326 | res0nat0r/gitosis | 1,323 | 382 | 9 | GPL-2.0 | 9/5/2024, 5:07:13 PM (Europe/Amsterdam) |
530 | mirror.py | res0nat0r_gitosis/gitosis/mirror.py | """
Created on 23 Mar 2009
@author: Damien Lebrun
"""
from ConfigParser import NoOptionError, NoSectionError
import os
import logging
from gitosis import repository
from gitosis import util
def push_mirrors(config, git_dir):
"""
Apply a push with the mirror option to all mirrors defined in gitosis.conf
of the repository being updated.
    @param config: ConfigParser object loaded with gitosis.conf
    @param git_dir: Path of the repository being updated.
"""
log = logging.getLogger('gitosis.mirror.push_mirrors')
repository_dir = os.path.abspath(util.getRepositoryDir(config))
git_dir = os.path.abspath(git_dir)
git_name = get_git_name(repository_dir, git_dir)
log.info('Updating %s\'s mirrors.' % git_name)
for remote in get_mirrors(config, git_name):
log.info('Updating %s.' % remote)
repository.mirror(git_dir, remote)
def get_git_name(repository_dir, git_dir):
"""
Guess the name of the repository used in gitosis.conf
    from the name of the git directory:
/path/to/foo.git => foo
    @param repository_dir: path to the gitosis repositories directory
    @param git_dir: path to the repository being updated.
"""
if git_dir.startswith(repository_dir):
git_name = git_dir[len(repository_dir):]
else:
git_name = os.path.split(git_dir)[1]
git_name = git_name.strip(r'\/')
if git_name.endswith('.git'):
git_name = git_name[:-4]
return git_name
def get_mirrors(config, git_name):
"""
Get a repository mirror list from gitosis.conf.
@param config: ConfigParser object
@param git_name: the name of the repository
"""
log = logging.getLogger('gitosis.mirror.get_mirrors')
try:
mirrors = config.get('repo %s' % git_name, 'mirrors')
for mirror in mirrors.split():
yield mirror
except (NoSectionError, NoOptionError):
pass
mirror_sections = (s for s in config.sections() if s.startswith('mirror '))
for section in mirror_sections:
try:
repos = config.get(section, 'repos')
if repos == '@all' or git_name in repos.split():
yield config.get(section, 'uri').strip() % git_name
except NoOptionError:
log.error('%s section is lacking the "repos" or "uri" settings.', section)
| 2,416 | Python | .py | 61 | 32.52459 | 86 | 0.671359 | res0nat0r/gitosis | 1,323 | 382 | 9 | GPL-2.0 | 9/5/2024, 5:07:13 PM (Europe/Amsterdam) |
531 | access.py | res0nat0r_gitosis/gitosis/access.py | import os, logging
from ConfigParser import NoSectionError, NoOptionError
from fnmatch import fnmatch
from gitosis import group
def pathMatchPatterns(path, repos):
"""
Check existence of given path against list of path patterns
The pattern definition is the as fnmatch.fnmatch.
"""
for repo in repos:
if fnmatch(path, repo):
return True
return False
def haveAccess(config, user, mode, path):
"""
Map request for write access to allowed path.
Note for read-only access, the caller should check for write
access too.
Returns ``None`` for no access, or a tuple of toplevel directory
containing repositories and a relative path to the physical repository.
"""
log = logging.getLogger('gitosis.access.haveAccess')
log.debug(
'Access check for %(user)r as %(mode)r on %(path)r...'
% dict(
user=user,
mode=mode,
path=path,
))
basename, ext = os.path.splitext(path)
if ext == '.git':
log.debug(
'Stripping .git suffix from %(path)r, new value %(basename)r'
% dict(
path=path,
basename=basename,
))
path = basename
sections = ['group %s' % item for item in
group.getMembership(config=config, user=user)]
sections.insert(0, 'user %s' % user)
for sectname in sections:
try:
repos = config.get(sectname, mode)
except (NoSectionError, NoOptionError):
repos = []
else:
repos = repos.split()
mapping = None
if pathMatchPatterns(path, repos):
log.debug(
'Access ok for %(user)r as %(mode)r on %(path)r'
% dict(
user=user,
mode=mode,
path=path,
))
mapping = path
else:
try:
mapping = config.get(sectname,
'map %s %s' % (mode, path))
except (NoSectionError, NoOptionError):
pass
else:
log.debug(
'Access ok for %(user)r as %(mode)r on %(path)r=%(mapping)r'
% dict(
user=user,
mode=mode,
path=path,
mapping=mapping,
))
if mapping is not None:
prefix = None
try:
prefix = config.get(sectname, 'repositories')
except (NoSectionError, NoOptionError):
try:
prefix = config.get('gitosis', 'repositories')
except (NoSectionError, NoOptionError):
prefix = 'repositories'
log.debug(
'Using prefix %(prefix)r for %(path)r'
% dict(
prefix=prefix,
path=mapping,
))
return (prefix, mapping)
| 3,016 | Python | .py | 89 | 22.146067 | 80 | 0.518366 | res0nat0r/gitosis | 1,323 | 382 | 9 | GPL-2.0 | 9/5/2024, 5:07:13 PM (Europe/Amsterdam) |
532 | gitdaemon.py | res0nat0r_gitosis/gitosis/gitdaemon.py | import errno
import logging
import os
from ConfigParser import NoSectionError, NoOptionError
log = logging.getLogger('gitosis.gitdaemon')
from gitosis import util
from gitosis import access
def export_ok_path(repopath):
p = os.path.join(repopath, 'git-daemon-export-ok')
return p
def allow_export(repopath):
p = export_ok_path(repopath)
file(p, 'a').close()
def deny_export(repopath):
p = export_ok_path(repopath)
try:
os.unlink(p)
except OSError, e:
if e.errno == errno.ENOENT:
pass
else:
raise
def _extract_reldir(topdir, dirpath):
if topdir == dirpath:
return '.'
prefix = topdir + '/'
assert dirpath.startswith(prefix)
reldir = dirpath[len(prefix):]
return reldir
def walk_repos(config):
repositories = util.getRepositoryDir(config)
def _error(e):
if e.errno == errno.ENOENT:
pass
else:
raise e
for (dirpath, dirnames, filenames) \
in os.walk(repositories, onerror=_error):
# oh how many times i have wished for os.walk to report
# topdir and reldir separately, instead of dirpath
reldir = _extract_reldir(
topdir=repositories,
dirpath=dirpath,
)
log.debug('Walking %r, seeing %r', reldir, dirnames)
to_recurse = []
repos = []
for dirname in dirnames:
if dirname.endswith('.git'):
repos.append(dirname)
else:
to_recurse.append(dirname)
dirnames[:] = to_recurse
for repo in repos:
name, ext = os.path.splitext(repo)
if reldir != '.':
name = os.path.join(reldir, name)
assert ext == '.git'
yield (dirpath, repo, name)
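# Export policy read by set_export_ok below (hypothetical config):
#
#   [gitosis]
#   daemon = no
#   daemon-if-all = yes
#
#   [repo foo]
#   daemon = yes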
def set_export_ok(config):
try:
global_enable = config.getboolean('gitosis', 'daemon')
except (NoSectionError, NoOptionError):
global_enable = False
log.debug(
'Global default is %r',
{True: 'allow', False: 'deny'}.get(global_enable),
)
try:
enable_if_all = config.getboolean('gitosis', 'daemon-if-all')
except (NoSectionError, NoOptionError):
enable_if_all = False
log.debug(
'If accessible to @all: %r',
{True: 'allow', False: 'unchanged'}.get(enable_if_all),
)
for (dirpath, repo, name) in walk_repos(config):
try:
enable = config.getboolean('repo %s' % name, 'daemon')
except (NoSectionError, NoOptionError):
enable = global_enable
if not enable and enable_if_all:
(users,groups,all_refs) = access.getAllAccess(config,name)
enable = ('@all' in all_refs)
if enable:
log.debug('Allow %r', name)
allow_export(os.path.join(dirpath, repo))
else:
log.debug('Deny %r', name)
deny_export(os.path.join(dirpath, repo))
| 3,004 | Python | .py | 90 | 24.922222 | 74 | 0.589637 | res0nat0r/gitosis | 1,323 | 382 | 9 | GPL-2.0 | 9/5/2024, 5:07:13 PM (Europe/Amsterdam) |
533 | snagit.py | res0nat0r_gitosis/gitosis/snagit.py | from gitosis import access
def list_repos(cfg, user, cmd):
all_repos = []
secs = cfg.sections()
for sec in secs:
for opt in cfg.options(sec):
if ((str(opt) == "writable") or (str(opt) == "writeable")):
ws = cfg.get(sec, opt).split()
for repo in ws:
                    if repo not in all_repos:
                        all_repos.append(repo)
elif (str(opt) == "readonly"):
readonlyies = cfg.get(sec, opt)
for repo in readonlyies:
try:
all_repos.index(repo)
except ValueError, e:
all_repos.append(repo)
readonly_repos = []
writable_repos = []
    # At this point we should have a list of unique repos.
for repo in all_repos:
rs = access.haveAccess(cfg, user, "writable", repo)
if (rs): # has read and write access
writable_repos.append(repo)
else:
rs = access.haveAccess(cfg, user, "readonly", repo)
if (rs): # has read only access
readonly_repos.append(repo)
else: # has no access
pass
for repo in writable_repos:
print "%s, writable" % str(repo)
for repo in readonly_repos:
print "%s, readonly" % str(repo)
| 1,423 | Python | .py | 37 | 25.486486 | 71 | 0.503273 | res0nat0r/gitosis | 1,323 | 382 | 9 | GPL-2.0 | 9/5/2024, 5:07:13 PM (Europe/Amsterdam) |
534 | app.py | res0nat0r_gitosis/gitosis/app.py | import os
import sys
import logging
import optparse
import errno
import ConfigParser
log = logging.getLogger('gitosis.app')
class CannotReadConfigError(Exception):
"""Unable to read config file"""
def __str__(self):
return '%s: %s' % (self.__doc__, ': '.join(self.args))
class ConfigFileDoesNotExistError(CannotReadConfigError):
"""Configuration does not exist"""
class App(object):
name = None
def run(class_):
app = class_()
return app.main()
run = classmethod(run)
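    # Console scripts bind subclasses to this classmethod, e.g.
    # 'gitosis-serve = gitosis.serve:Main.run' in setup.py.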
def main(self):
self.setup_basic_logging()
parser = self.create_parser()
(options, args) = parser.parse_args()
cfg = self.create_config(options)
try:
self.read_config(options, cfg)
except CannotReadConfigError, e:
log.error(str(e))
sys.exit(1)
self.setup_logging(cfg)
self.handle_args(parser, cfg, options, args)
def setup_basic_logging(self):
logging.basicConfig()
def create_parser(self):
parser = optparse.OptionParser()
parser.set_defaults(
config=os.path.expanduser('~/.gitosis.conf'),
)
parser.add_option('--config',
metavar='FILE',
help='read config from FILE',
)
return parser
def create_config(self, options):
cfg = ConfigParser.RawConfigParser()
return cfg
def read_config(self, options, cfg):
try:
conffile = file(options.config)
except (IOError, OSError), e:
if e.errno == errno.ENOENT:
# special case this because gitosis-init wants to
# ignore this particular error case
raise ConfigFileDoesNotExistError(str(e))
else:
raise CannotReadConfigError(str(e))
try:
cfg.readfp(conffile)
finally:
conffile.close()
def setup_logging(self, cfg):
try:
loglevel = cfg.get('gitosis', 'loglevel')
except (ConfigParser.NoSectionError,
ConfigParser.NoOptionError):
pass
else:
try:
symbolic = logging._levelNames[loglevel]
except KeyError:
log.warning(
'Ignored invalid loglevel configuration: %r',
loglevel,
)
else:
logging.root.setLevel(symbolic)
def handle_args(self, parser, cfg, options, args):
if args:
parser.error('not expecting arguments')
| 2,647 | Python | .py | 79 | 23.240506 | 65 | 0.567737 | res0nat0r/gitosis | 1,323 | 382 | 9 | GPL-2.0 | 9/5/2024, 5:07:13 PM (Europe/Amsterdam) |
535 | init.py | res0nat0r_gitosis/gitosis/init.py | """
Initialize a user account for use with gitosis.
"""
import errno
import logging
import os
import sys
from pkg_resources import resource_filename
from cStringIO import StringIO
from ConfigParser import RawConfigParser
from gitosis import repository
from gitosis import run_hook
from gitosis import ssh
from gitosis import util
from gitosis import app
log = logging.getLogger('gitosis.init')
def read_ssh_pubkey(fp=None):
if fp is None:
fp = sys.stdin
line = fp.readline()
return line
class InsecureSSHKeyUsername(Exception):
"""Username contains not allowed characters"""
def __str__(self):
return '%s: %s' % (self.__doc__, ': '.join(self.args))
def ssh_extract_user(pubkey):
_, user = pubkey.rsplit(None, 1)
if ssh.isSafeUsername(user):
return user
else:
raise InsecureSSHKeyUsername(repr(user))
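# E.g. the key line "ssh-rsa AAAA... jdoe@example" yields 'jdoe@example',
# i.e. the trailing comment field of the public key.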
def initial_commit(git_dir, cfg, pubkey, user):
repository.fast_import(
git_dir=git_dir,
commit_msg='Automatic creation of gitosis repository.',
committer='Gitosis Admin <%s>' % user,
files=[
('keydir/%s.pub' % user, pubkey),
('gitosis.conf', cfg),
],
)
def symlink_config(git_dir):
dst = os.path.expanduser('~/.gitosis.conf')
tmp = '%s.%d.tmp' % (dst, os.getpid())
try:
os.unlink(tmp)
except OSError, e:
if e.errno == errno.ENOENT:
pass
else:
raise
os.symlink(
os.path.join(git_dir, 'gitosis.conf'),
tmp,
)
os.rename(tmp, dst)
def init_admin_repository(
git_dir,
pubkey,
user,
):
repository.init(
path=git_dir,
template=resource_filename('gitosis.templates', 'admin')
)
repository.init(
path=git_dir,
)
hook = os.path.join(git_dir, 'hooks', 'post-update')
os.chmod(hook, 0755)
if not repository.has_initial_commit(git_dir):
log.info('Making initial commit...')
# ConfigParser does not guarantee order, so jump through hoops
# to make sure [gitosis] is first
cfg_file = StringIO()
print >>cfg_file, '[gitosis]'
print >>cfg_file
cfg = RawConfigParser()
cfg.add_section('group gitosis-admin')
cfg.set('group gitosis-admin', 'members', user)
cfg.set('group gitosis-admin', 'writable', 'gitosis-admin')
cfg.write(cfg_file)
initial_commit(
git_dir=git_dir,
cfg=cfg_file.getvalue(),
pubkey=pubkey,
user=user,
)
class Main(app.App):
def create_parser(self):
parser = super(Main, self).create_parser()
parser.set_usage('%prog [OPTS]')
parser.set_description(
'Initialize a user account for use with gitosis')
return parser
def read_config(self, *a, **kw):
# ignore errors that result from non-existent config file
try:
super(Main, self).read_config(*a, **kw)
except app.ConfigFileDoesNotExistError:
pass
def handle_args(self, parser, cfg, options, args):
super(Main, self).handle_args(parser, cfg, options, args)
os.umask(0022)
log.info('Reading SSH public key...')
pubkey = read_ssh_pubkey()
user = ssh_extract_user(pubkey)
if user is None:
log.error('Cannot parse user from SSH public key.')
sys.exit(1)
log.info('Admin user is %r', user)
log.info('Creating generated files directory...')
generated = util.getGeneratedFilesDir(config=cfg)
util.mkdir(generated)
log.info('Creating repository structure...')
repositories = util.getRepositoryDir(cfg)
util.mkdir(repositories)
admin_repository = os.path.join(repositories, 'gitosis-admin.git')
init_admin_repository(
git_dir=admin_repository,
pubkey=pubkey,
user=user,
)
log.info('Running post-update hook...')
util.mkdir(os.path.expanduser('~/.ssh'), 0700)
run_hook.post_update(cfg=cfg, git_dir=admin_repository)
log.info('Symlinking ~/.gitosis.conf to repository...')
symlink_config(git_dir=admin_repository)
log.info('Done.')
| 4,301 | Python | .py | 129 | 25.891473 | 74 | 0.618142 | res0nat0r/gitosis | 1,323 | 382 | 9 | GPL-2.0 | 9/5/2024, 5:07:13 PM (Europe/Amsterdam) |
536 | run_hook.py | res0nat0r_gitosis/gitosis/run_hook.py | """
Perform gitosis actions for a git hook.
"""
from ConfigParser import NoOptionError, NoSectionError
import errno
import logging
import os
import sys
import shutil
from gitosis import repository
from gitosis import ssh
from gitosis import gitweb
from gitosis import gitdaemon
from gitosis import app
from gitosis import util
from gitosis import mirror
def post_update(cfg, git_dir):
export = os.path.join(git_dir, 'gitosis-export')
try:
shutil.rmtree(export)
except OSError, e:
if e.errno == errno.ENOENT:
pass
else:
raise
repository.export(git_dir=git_dir, path=export)
os.rename(
os.path.join(export, 'gitosis.conf'),
os.path.join(export, '..', 'gitosis.conf'),
)
# re-read config to get up-to-date settings
cfg.read(os.path.join(export, '..', 'gitosis.conf'))
gitweb.set_descriptions(
config=cfg,
)
generated = util.getGeneratedFilesDir(config=cfg)
gitweb.generate_project_list(
config=cfg,
path=os.path.join(generated, 'projects.list'),
)
gitdaemon.set_export_ok(
config=cfg,
)
authorized_keys = util.getSSHAuthorizedKeysPath(config=cfg)
ssh.writeAuthorizedKeys(
path=authorized_keys,
keydir=os.path.join(export, 'keydir'),
)
def update_mirrors(cfg, git_dir):
mirror.push_mirrors(cfg, git_dir)
class Main(app.App):
def create_parser(self):
parser = super(Main, self).create_parser()
parser.set_usage('%prog [OPTS] HOOK')
parser.set_description(
'Perform gitosis actions for a git hook')
return parser
def handle_args(self, parser, cfg, options, args):
try:
(hook,) = args
except ValueError:
parser.error('Missing argument HOOK.')
log = logging.getLogger('gitosis.run_hook')
os.umask(0022)
git_dir = os.environ.get('GIT_DIR')
if git_dir is None:
            log.error('Must have GIT_DIR set in environment')
sys.exit(1)
if hook == 'post-update':
log.info('Running hook %s', hook)
post_update(cfg, git_dir)
log.info('Done.')
elif hook == 'update-mirrors':
log.info('Running hook %s', hook)
update_mirrors(cfg, git_dir)
log.info('Done.')
else:
log.warning('Ignoring unknown hook: %r', hook)
| 2,455 | Python | .py | 78 | 24.397436 | 63 | 0.627376 | res0nat0r/gitosis | 1,323 | 382 | 9 | GPL-2.0 | 9/5/2024, 5:07:13 PM (Europe/Amsterdam) |
537 | serve.py | res0nat0r_gitosis/gitosis/serve.py | """
Enforce git-shell to only serve repositories allowed by the access
control policy. The client should refer to them without any extra
directory prefix. Repository names are forced to match ALLOW_RE.
"""
import logging
import sys, os, re
from gitosis import access
from gitosis import repository
from gitosis import gitweb
from gitosis import gitdaemon
from gitosis import app
from gitosis import util
from gitosis import snagit
log = logging.getLogger('gitosis.serve')
ALLOW_RE = re.compile("^'/*(?P<path>[a-zA-Z0-9][a-zA-Z0-9@._-]*(/[a-zA-Z0-9][a-zA-Z0-9@._-]*)*)'$")
COMMANDS_READONLY = [
'git-upload-pack',
'git upload-pack',
]
COMMANDS_WRITE = [
'git-receive-pack',
'git receive-pack',
]
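# serve() expects SSH_ORIGINAL_COMMAND values shaped like these
# (repository names are illustrative):
#   git-upload-pack 'foo'
#   git receive-pack 'bar/baz.git'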
class ServingError(Exception):
"""Serving error"""
def __str__(self):
return '%s' % self.__doc__
class CommandMayNotContainNewlineError(ServingError):
"""Command may not contain newline"""
class UnknownCommandError(ServingError):
"""Unknown command denied"""
class UnsafeArgumentsError(ServingError):
"""Arguments to command look dangerous"""
class AccessDenied(ServingError):
"""Access denied to repository"""
class WriteAccessDenied(AccessDenied):
"""Repository write access denied"""
class ReadAccessDenied(AccessDenied):
"""Repository read access denied"""
def serve(
cfg,
user,
command,
):
if '\n' in command:
raise CommandMayNotContainNewlineError()
try:
verb, args = command.split(None, 1)
except ValueError:
# all known "git-foo" commands take one argument; improve
# if/when needed
raise UnknownCommandError()
if verb == 'git':
try:
subverb, args = args.split(None, 1)
except ValueError:
# all known "git foo" commands take one argument; improve
# if/when needed
raise UnknownCommandError()
verb = '%s %s' % (verb, subverb)
if (verb not in COMMANDS_WRITE
and verb not in COMMANDS_READONLY):
raise UnknownCommandError()
match = ALLOW_RE.match(args)
if match is None:
raise UnsafeArgumentsError()
path = match.group('path')
# write access is always sufficient
newpath = access.haveAccess(
config=cfg,
user=user,
mode='writable',
path=path)
if newpath is None:
# didn't have write access; try once more with the popular
# misspelling
newpath = access.haveAccess(
config=cfg,
user=user,
mode='writeable',
path=path)
if newpath is not None:
log.warning(
'Repository %r config has typo "writeable", '
+'should be "writable"',
path,
)
if newpath is None:
# didn't have write access
newpath = access.haveAccess(
config=cfg,
user=user,
mode='readonly',
path=path)
if newpath is None:
raise ReadAccessDenied()
if verb in COMMANDS_WRITE:
# didn't have write access and tried to write
raise WriteAccessDenied()
(topdir, relpath) = newpath
assert not relpath.endswith('.git'), \
'git extension should have been stripped: %r' % relpath
repopath = '%s.git' % relpath
fullpath = os.path.join(topdir, repopath)
if (not os.path.exists(fullpath)
and verb in COMMANDS_WRITE):
# it doesn't exist on the filesystem, but the configuration
# refers to it, we're serving a write request, and the user is
# authorized to do that: create the repository on the fly
# create leading directories
p = topdir
for segment in repopath.split(os.sep)[:-1]:
p = os.path.join(p, segment)
util.mkdir(p, 0750)
repository.init(path=fullpath)
gitweb.set_descriptions(
config=cfg,
)
generated = util.getGeneratedFilesDir(config=cfg)
gitweb.generate_project_list(
config=cfg,
path=os.path.join(generated, 'projects.list'),
)
gitdaemon.set_export_ok(
config=cfg,
)
# put the verb back together with the new path
newcmd = "%(verb)s '%(path)s'" % dict(
verb=verb,
path=fullpath,
)
return newcmd
class Main(app.App):
def create_parser(self):
parser = super(Main, self).create_parser()
parser.set_usage('%prog [OPTS] USER')
parser.set_description(
'Allow restricted git operations under DIR')
return parser
def handle_args(self, parser, cfg, options, args):
try:
(user,) = args
except ValueError:
parser.error('Missing argument USER.')
main_log = logging.getLogger('gitosis.serve.main')
os.umask(0022)
cmd = os.environ.get('SSH_ORIGINAL_COMMAND', None)
if cmd is None:
main_log.error('Need SSH_ORIGINAL_COMMAND in environment.')
sys.exit(1)
main_log.debug('Got command %(cmd)r' % dict(
cmd=cmd,
))
os.chdir(os.path.expanduser('~'))
if (cmd == "snagit list-repos"):
try:
snagit.list_repos(cfg, user, cmd)
sys.exit(0)
except Exception, e:
main_log.error('%s', e)
sys.exit(1)
try:
newcmd = serve(
cfg=cfg,
user=user,
command=cmd,
)
except ServingError, e:
main_log.error('%s', e)
sys.exit(1)
main_log.debug('Serving %s', newcmd)
os.execvp('git', ['git', 'shell', '-c', newcmd])
main_log.error('Cannot execute git-shell.')
sys.exit(1)
| 5,899 | Python | .py | 175 | 25.171429 | 99 | 0.596195 | res0nat0r/gitosis | 1,323 | 382 | 9 | GPL-2.0 | 9/5/2024, 5:07:13 PM (Europe/Amsterdam) |
538 | group.py | res0nat0r_gitosis/gitosis/group.py | import logging
from ConfigParser import NoSectionError, NoOptionError
def _getMembership(config, user, seen):
log = logging.getLogger('gitosis.group.getMembership')
for section in config.sections():
GROUP_PREFIX = 'group '
if not section.startswith(GROUP_PREFIX):
continue
group = section[len(GROUP_PREFIX):]
if group in seen:
continue
try:
members = config.get(section, 'members')
except (NoSectionError, NoOptionError):
members = []
else:
members = members.split()
        # @all is the only group where membership needs to be
        # bootstrapped like this; anything else gets started from the
        # username itself
if (user in members
or '@all' in members):
log.debug('found %(user)r in %(group)r' % dict(
user=user,
group=group,
))
seen.add(group)
yield group
for member_of in _getMembership(
config, '@%s' % group, seen,
):
yield member_of
def getMembership(config, user):
"""
    Generate groups ``user`` is member of, according to ``config``.

    :type config: RawConfigParser
    :type user: str
"""
seen = set()
for member_of in _getMembership(config, user, seen):
yield member_of
# everyone is always a member of group "all"
yield 'all'
| 1,520 | Python | .py | 44 | 25.113636 | 69 | 0.582253 | res0nat0r/gitosis | 1,323 | 382 | 9 | GPL-2.0 | 9/5/2024, 5:07:13 PM (Europe/Amsterdam) |
539 | test_serve.py | res0nat0r_gitosis/gitosis/test/test_serve.py | from nose.tools import eq_ as eq
from gitosis.test.util import assert_raises
import logging
import os
from cStringIO import StringIO
from ConfigParser import RawConfigParser
from gitosis import serve
from gitosis import repository
from gitosis.test import util
def test_bad_newLine():
cfg = RawConfigParser()
e = assert_raises(
serve.CommandMayNotContainNewlineError,
serve.serve,
cfg=cfg,
user='jdoe',
command='ev\nil',
)
eq(str(e), 'Command may not contain newline')
assert isinstance(e, serve.ServingError)
def test_bad_dash_noargs():
cfg = RawConfigParser()
e = assert_raises(
serve.UnknownCommandError,
serve.serve,
cfg=cfg,
user='jdoe',
command='git-upload-pack',
)
eq(str(e), 'Unknown command denied')
assert isinstance(e, serve.ServingError)
def test_bad_space_noargs():
cfg = RawConfigParser()
e = assert_raises(
serve.UnknownCommandError,
serve.serve,
cfg=cfg,
user='jdoe',
command='git upload-pack',
)
eq(str(e), 'Unknown command denied')
assert isinstance(e, serve.ServingError)
def test_bad_command():
cfg = RawConfigParser()
e = assert_raises(
serve.UnknownCommandError,
serve.serve,
cfg=cfg,
user='jdoe',
command="evil 'foo'",
)
eq(str(e), 'Unknown command denied')
assert isinstance(e, serve.ServingError)
def test_bad_unsafeArguments_notQuoted():
cfg = RawConfigParser()
e = assert_raises(
serve.UnsafeArgumentsError,
serve.serve,
cfg=cfg,
user='jdoe',
command="git-upload-pack foo",
)
eq(str(e), 'Arguments to command look dangerous')
assert isinstance(e, serve.ServingError)
def test_bad_unsafeArguments_badCharacters():
cfg = RawConfigParser()
e = assert_raises(
serve.UnsafeArgumentsError,
serve.serve,
cfg=cfg,
user='jdoe',
command="git-upload-pack 'ev!l'",
)
eq(str(e), 'Arguments to command look dangerous')
assert isinstance(e, serve.ServingError)
def test_bad_unsafeArguments_dotdot():
cfg = RawConfigParser()
e = assert_raises(
serve.UnsafeArgumentsError,
serve.serve,
cfg=cfg,
user='jdoe',
command="git-upload-pack 'something/../evil'",
)
eq(str(e), 'Arguments to command look dangerous')
assert isinstance(e, serve.ServingError)
def test_bad_forbiddenCommand_read_dash():
cfg = RawConfigParser()
e = assert_raises(
serve.ReadAccessDenied,
serve.serve,
cfg=cfg,
user='jdoe',
command="git-upload-pack 'foo'",
)
eq(str(e), 'Repository read access denied')
assert isinstance(e, serve.AccessDenied)
assert isinstance(e, serve.ServingError)
def test_bad_forbiddenCommand_read_space():
cfg = RawConfigParser()
e = assert_raises(
serve.ReadAccessDenied,
serve.serve,
cfg=cfg,
user='jdoe',
command="git upload-pack 'foo'",
)
eq(str(e), 'Repository read access denied')
assert isinstance(e, serve.AccessDenied)
assert isinstance(e, serve.ServingError)
def test_bad_forbiddenCommand_write_noAccess_dash():
cfg = RawConfigParser()
e = assert_raises(
serve.ReadAccessDenied,
serve.serve,
cfg=cfg,
user='jdoe',
command="git-receive-pack 'foo'",
)
# error message talks about read in an effort to make it more
# obvious that jdoe doesn't have *even* read access
eq(str(e), 'Repository read access denied')
assert isinstance(e, serve.AccessDenied)
assert isinstance(e, serve.ServingError)
def test_bad_forbiddenCommand_write_noAccess_space():
cfg = RawConfigParser()
e = assert_raises(
serve.ReadAccessDenied,
serve.serve,
cfg=cfg,
user='jdoe',
command="git receive-pack 'foo'",
)
# error message talks about read in an effort to make it more
# obvious that jdoe doesn't have *even* read access
eq(str(e), 'Repository read access denied')
assert isinstance(e, serve.AccessDenied)
assert isinstance(e, serve.ServingError)
def test_bad_forbiddenCommand_write_readAccess_dash():
cfg = RawConfigParser()
cfg.add_section('group foo')
cfg.set('group foo', 'members', 'jdoe')
cfg.set('group foo', 'readonly', 'foo')
e = assert_raises(
serve.WriteAccessDenied,
serve.serve,
cfg=cfg,
user='jdoe',
command="git-receive-pack 'foo'",
)
eq(str(e), 'Repository write access denied')
assert isinstance(e, serve.AccessDenied)
assert isinstance(e, serve.ServingError)
def test_bad_forbiddenCommand_write_readAccess_space():
cfg = RawConfigParser()
cfg.add_section('group foo')
cfg.set('group foo', 'members', 'jdoe')
cfg.set('group foo', 'readonly', 'foo')
e = assert_raises(
serve.WriteAccessDenied,
serve.serve,
cfg=cfg,
user='jdoe',
command="git receive-pack 'foo'",
)
eq(str(e), 'Repository write access denied')
assert isinstance(e, serve.AccessDenied)
assert isinstance(e, serve.ServingError)
def test_simple_read_dash():
tmp = util.maketemp()
repository.init(os.path.join(tmp, 'foo.git'))
cfg = RawConfigParser()
cfg.add_section('gitosis')
cfg.set('gitosis', 'repositories', tmp)
cfg.add_section('group foo')
cfg.set('group foo', 'members', 'jdoe')
cfg.set('group foo', 'readonly', 'foo')
got = serve.serve(
cfg=cfg,
user='jdoe',
command="git-upload-pack 'foo'",
)
eq(got, "git-upload-pack '%s/foo.git'" % tmp)
def test_simple_read_space():
tmp = util.maketemp()
repository.init(os.path.join(tmp, 'foo.git'))
cfg = RawConfigParser()
cfg.add_section('gitosis')
cfg.set('gitosis', 'repositories', tmp)
cfg.add_section('group foo')
cfg.set('group foo', 'members', 'jdoe')
cfg.set('group foo', 'readonly', 'foo')
got = serve.serve(
cfg=cfg,
user='jdoe',
command="git upload-pack 'foo'",
)
eq(got, "git upload-pack '%s/foo.git'" % tmp)
def test_simple_write_dash():
tmp = util.maketemp()
repository.init(os.path.join(tmp, 'foo.git'))
cfg = RawConfigParser()
cfg.add_section('gitosis')
cfg.set('gitosis', 'repositories', tmp)
cfg.add_section('group foo')
cfg.set('group foo', 'members', 'jdoe')
cfg.set('group foo', 'writable', 'foo')
got = serve.serve(
cfg=cfg,
user='jdoe',
command="git-receive-pack 'foo'",
)
eq(got, "git-receive-pack '%s/foo.git'" % tmp)
def test_simple_write_space():
tmp = util.maketemp()
repository.init(os.path.join(tmp, 'foo.git'))
cfg = RawConfigParser()
cfg.add_section('gitosis')
cfg.set('gitosis', 'repositories', tmp)
cfg.add_section('group foo')
cfg.set('group foo', 'members', 'jdoe')
cfg.set('group foo', 'writable', 'foo')
got = serve.serve(
cfg=cfg,
user='jdoe',
command="git receive-pack 'foo'",
)
eq(got, "git receive-pack '%s/foo.git'" % tmp)
def test_push_inits_if_needed():
# a push to a non-existent repository (but where config authorizes
# you to do that) will create the repository on the fly
tmp = util.maketemp()
cfg = RawConfigParser()
cfg.add_section('gitosis')
repositories = os.path.join(tmp, 'repositories')
os.mkdir(repositories)
cfg.set('gitosis', 'repositories', repositories)
generated = os.path.join(tmp, 'generated')
os.mkdir(generated)
cfg.set('gitosis', 'generate-files-in', generated)
cfg.add_section('group foo')
cfg.set('group foo', 'members', 'jdoe')
cfg.set('group foo', 'writable', 'foo')
serve.serve(
cfg=cfg,
user='jdoe',
command="git-receive-pack 'foo'",
)
eq(os.listdir(repositories), ['foo.git'])
assert os.path.isfile(os.path.join(repositories, 'foo.git', 'HEAD'))
def test_push_inits_if_needed_haveExtension():
# a push to a non-existent repository (but where config authorizes
# you to do that) will create the repository on the fly
tmp = util.maketemp()
cfg = RawConfigParser()
cfg.add_section('gitosis')
repositories = os.path.join(tmp, 'repositories')
os.mkdir(repositories)
cfg.set('gitosis', 'repositories', repositories)
generated = os.path.join(tmp, 'generated')
os.mkdir(generated)
cfg.set('gitosis', 'generate-files-in', generated)
cfg.add_section('group foo')
cfg.set('group foo', 'members', 'jdoe')
cfg.set('group foo', 'writable', 'foo')
serve.serve(
cfg=cfg,
user='jdoe',
command="git-receive-pack 'foo.git'",
)
eq(os.listdir(repositories), ['foo.git'])
assert os.path.isfile(os.path.join(repositories, 'foo.git', 'HEAD'))
def test_push_inits_subdir_parent_missing():
tmp = util.maketemp()
cfg = RawConfigParser()
cfg.add_section('gitosis')
repositories = os.path.join(tmp, 'repositories')
os.mkdir(repositories)
cfg.set('gitosis', 'repositories', repositories)
generated = os.path.join(tmp, 'generated')
os.mkdir(generated)
cfg.set('gitosis', 'generate-files-in', generated)
cfg.add_section('group foo')
cfg.set('group foo', 'members', 'jdoe')
cfg.set('group foo', 'writable', 'foo/bar')
serve.serve(
cfg=cfg,
user='jdoe',
command="git-receive-pack 'foo/bar.git'",
)
eq(os.listdir(repositories), ['foo'])
foo = os.path.join(repositories, 'foo')
util.check_mode(foo, 0750, is_dir=True)
eq(os.listdir(foo), ['bar.git'])
assert os.path.isfile(os.path.join(repositories, 'foo', 'bar.git', 'HEAD'))
def test_push_inits_subdir_parent_exists():
tmp = util.maketemp()
cfg = RawConfigParser()
cfg.add_section('gitosis')
repositories = os.path.join(tmp, 'repositories')
os.mkdir(repositories)
foo = os.path.join(repositories, 'foo')
# silly mode on purpose; not to be touched
os.mkdir(foo, 0751)
cfg.set('gitosis', 'repositories', repositories)
generated = os.path.join(tmp, 'generated')
os.mkdir(generated)
cfg.set('gitosis', 'generate-files-in', generated)
cfg.add_section('group foo')
cfg.set('group foo', 'members', 'jdoe')
cfg.set('group foo', 'writable', 'foo/bar')
serve.serve(
cfg=cfg,
user='jdoe',
command="git-receive-pack 'foo/bar.git'",
)
eq(os.listdir(repositories), ['foo'])
util.check_mode(foo, 0751, is_dir=True)
eq(os.listdir(foo), ['bar.git'])
assert os.path.isfile(os.path.join(repositories, 'foo', 'bar.git', 'HEAD'))
def test_push_inits_if_needed_existsWithExtension():
tmp = util.maketemp()
cfg = RawConfigParser()
cfg.add_section('gitosis')
repositories = os.path.join(tmp, 'repositories')
os.mkdir(repositories)
cfg.set('gitosis', 'repositories', repositories)
cfg.add_section('group foo')
cfg.set('group foo', 'members', 'jdoe')
cfg.set('group foo', 'writable', 'foo')
os.mkdir(os.path.join(repositories, 'foo.git'))
serve.serve(
cfg=cfg,
user='jdoe',
command="git-receive-pack 'foo'",
)
eq(os.listdir(repositories), ['foo.git'])
# it should *not* have HEAD here as we just mkdirred it and didn't
# create it properly, and the mock repo didn't have anything in
# it.. having HEAD implies serve ran git init, which is supposed
# to be unnecessary here
eq(os.listdir(os.path.join(repositories, 'foo.git')), [])
def test_push_inits_no_stdout_spam():
# git init has a tendency to spew to stdout, and that confuses
# e.g. a git push
tmp = util.maketemp()
cfg = RawConfigParser()
cfg.add_section('gitosis')
repositories = os.path.join(tmp, 'repositories')
os.mkdir(repositories)
cfg.set('gitosis', 'repositories', repositories)
generated = os.path.join(tmp, 'generated')
os.mkdir(generated)
cfg.set('gitosis', 'generate-files-in', generated)
cfg.add_section('group foo')
cfg.set('group foo', 'members', 'jdoe')
cfg.set('group foo', 'writable', 'foo')
old_stdout = os.dup(1)
try:
new_stdout = os.tmpfile()
os.dup2(new_stdout.fileno(), 1)
serve.serve(
cfg=cfg,
user='jdoe',
command="git-receive-pack 'foo'",
)
finally:
os.dup2(old_stdout, 1)
os.close(old_stdout)
new_stdout.seek(0)
got = new_stdout.read()
new_stdout.close()
eq(got, '')
eq(os.listdir(repositories), ['foo.git'])
assert os.path.isfile(os.path.join(repositories, 'foo.git', 'HEAD'))
def test_push_inits_sets_description():
tmp = util.maketemp()
cfg = RawConfigParser()
cfg.add_section('gitosis')
repositories = os.path.join(tmp, 'repositories')
os.mkdir(repositories)
cfg.set('gitosis', 'repositories', repositories)
generated = os.path.join(tmp, 'generated')
os.mkdir(generated)
cfg.set('gitosis', 'generate-files-in', generated)
cfg.add_section('group foo')
cfg.set('group foo', 'members', 'jdoe')
cfg.set('group foo', 'writable', 'foo')
cfg.add_section('repo foo')
cfg.set('repo foo', 'description', 'foodesc')
serve.serve(
cfg=cfg,
user='jdoe',
command="git-receive-pack 'foo'",
)
eq(os.listdir(repositories), ['foo.git'])
path = os.path.join(repositories, 'foo.git', 'description')
assert os.path.exists(path)
got = util.readFile(path)
eq(got, 'foodesc\n')
def test_push_inits_updates_projects_list():
tmp = util.maketemp()
cfg = RawConfigParser()
cfg.add_section('gitosis')
repositories = os.path.join(tmp, 'repositories')
os.mkdir(repositories)
cfg.set('gitosis', 'repositories', repositories)
generated = os.path.join(tmp, 'generated')
os.mkdir(generated)
cfg.set('gitosis', 'generate-files-in', generated)
cfg.add_section('group foo')
cfg.set('group foo', 'members', 'jdoe')
cfg.set('group foo', 'writable', 'foo')
cfg.add_section('repo foo')
cfg.set('repo foo', 'gitweb', 'yes')
os.mkdir(os.path.join(repositories, 'gitosis-admin.git'))
serve.serve(
cfg=cfg,
user='jdoe',
command="git-receive-pack 'foo'",
)
eq(
sorted(os.listdir(repositories)),
sorted(['foo.git', 'gitosis-admin.git']),
)
path = os.path.join(generated, 'projects.list')
assert os.path.exists(path)
got = util.readFile(path)
eq(got, 'foo.git\n')
def test_push_inits_sets_export_ok():
tmp = util.maketemp()
cfg = RawConfigParser()
cfg.add_section('gitosis')
repositories = os.path.join(tmp, 'repositories')
os.mkdir(repositories)
cfg.set('gitosis', 'repositories', repositories)
generated = os.path.join(tmp, 'generated')
os.mkdir(generated)
cfg.set('gitosis', 'generate-files-in', generated)
cfg.add_section('group foo')
cfg.set('group foo', 'members', 'jdoe')
cfg.set('group foo', 'writable', 'foo')
cfg.add_section('repo foo')
cfg.set('repo foo', 'daemon', 'yes')
serve.serve(
cfg=cfg,
user='jdoe',
command="git-receive-pack 'foo'",
)
eq(os.listdir(repositories), ['foo.git'])
path = os.path.join(repositories, 'foo.git', 'git-daemon-export-ok')
assert os.path.exists(path)
def test_absolute():
# as the only convenient way to use non-standard SSH ports with
# git is via the ssh://user@host:port/path syntax, and that syntax
# forces absolute urls, just force convert absolute paths to
# relative paths; you'll never really want absolute paths via
# gitosis, anyway.
tmp = util.maketemp()
repository.init(os.path.join(tmp, 'foo.git'))
cfg = RawConfigParser()
cfg.add_section('gitosis')
cfg.set('gitosis', 'repositories', tmp)
cfg.add_section('group foo')
cfg.set('group foo', 'members', 'jdoe')
cfg.set('group foo', 'readonly', 'foo')
got = serve.serve(
cfg=cfg,
user='jdoe',
command="git-upload-pack '/foo'",
)
eq(got, "git-upload-pack '%s/foo.git'" % tmp)
def test_typo_writeable():
tmp = util.maketemp()
repository.init(os.path.join(tmp, 'foo.git'))
cfg = RawConfigParser()
cfg.add_section('gitosis')
cfg.set('gitosis', 'repositories', tmp)
cfg.add_section('group foo')
cfg.set('group foo', 'members', 'jdoe')
cfg.set('group foo', 'writeable', 'foo')
log = logging.getLogger('gitosis.serve')
buf = StringIO()
handler = logging.StreamHandler(buf)
log.addHandler(handler)
try:
got = serve.serve(
cfg=cfg,
user='jdoe',
command="git-receive-pack 'foo'",
)
finally:
log.removeHandler(handler)
eq(got, "git-receive-pack '%s/foo.git'" % tmp)
handler.flush()
eq(
buf.getvalue(),
"Repository 'foo' config has typo \"writeable\", shou"
+"ld be \"writable\"\n",
)
| 17,216 | Python | .py | 500 | 28.386 | 79 | 0.637579 | res0nat0r/gitosis | 1,323 | 382 | 9 | GPL-2.0 | 9/5/2024, 5:07:13 PM (Europe/Amsterdam) |
540 | test_repository.py | res0nat0r_gitosis/gitosis/test/test_repository.py | from nose.tools import eq_ as eq
import os
import subprocess
import random
from gitosis import repository
from gitosis.test.util import (
mkdir,
maketemp,
readFile,
writeFile,
check_mode,
assert_raises,
)
def check_bare(path):
# we want it to be a bare repository
assert not os.path.exists(os.path.join(path, '.git'))
def test_init_simple():
tmp = maketemp()
path = os.path.join(tmp, 'repo.git')
repository.init(path)
check_mode(path, 0750, is_dir=True)
check_bare(path)
def test_init_exist_dir():
tmp = maketemp()
path = os.path.join(tmp, 'repo.git')
mkdir(path, 0710)
check_mode(path, 0710, is_dir=True)
repository.init(path)
# my weird access mode is preserved
check_mode(path, 0710, is_dir=True)
check_bare(path)
def test_init_exist_git():
tmp = maketemp()
path = os.path.join(tmp, 'repo.git')
repository.init(path)
repository.init(path)
check_mode(path, 0750, is_dir=True)
check_bare(path)
def test_init_templates():
tmp = maketemp()
path = os.path.join(tmp, 'repo.git')
templatedir = os.path.join(
os.path.dirname(__file__),
'mocktemplates',
)
os.umask(0022)
repository.init(path, template=templatedir)
repository.init(path)
got = readFile(os.path.join(path, 'no-confusion'))
eq(got, 'i should show up\n')
check_mode(
os.path.join(path, 'hooks', 'post-update'),
0755,
is_file=True,
)
got = readFile(os.path.join(path, 'hooks', 'post-update'))
eq(got, '#!/bin/sh\n# i can override standard templates\n')
# Git doesn't create missing hooks
#assert os.path.isfile(os.path.join(path, 'hooks', 'pre-rebase'))
def test_init_default_templates():
tmp = maketemp()
path = os.path.join(tmp, 'repo.git')
repository.init(path)
hook_path = os.path.join(path, 'hooks', 'post-receive')
check_mode(
hook_path,
0755,
is_file=True,
)
got = readFile(hook_path)
eq(got, '#!/bin/sh\nset -e\ngit-update-server-info\ngitosis-run-hook update-mirrors')
def test_init_environment():
tmp = maketemp()
path = os.path.join(tmp, 'repo.git')
mockbindir = os.path.join(tmp, 'mockbin')
os.mkdir(mockbindir)
mockgit = os.path.join(mockbindir, 'git')
writeFile(mockgit, '''\
#!/bin/sh
set -e
# git wrapper for gitosis unit tests
printf '%s' "$GITOSIS_UNITTEST_COOKIE" >"$(dirname "$0")/../cookie"
# strip away my special PATH insert so system git will be found
PATH="${PATH#*:}"
exec git "$@"
''')
os.chmod(mockgit, 0755)
magic_cookie = '%d' % random.randint(1, 100000)
good_path = os.environ['PATH']
try:
os.environ['PATH'] = '%s:%s' % (mockbindir, good_path)
os.environ['GITOSIS_UNITTEST_COOKIE'] = magic_cookie
repository.init(path)
finally:
os.environ['PATH'] = good_path
os.environ.pop('GITOSIS_UNITTEST_COOKIE', None)
eq(
sorted(os.listdir(tmp)),
sorted([
'mockbin',
'cookie',
'repo.git',
]),
)
got = readFile(os.path.join(tmp, 'cookie'))
eq(got, magic_cookie)
def test_fast_import_environment():
tmp = maketemp()
path = os.path.join(tmp, 'repo.git')
repository.init(path=path)
mockbindir = os.path.join(tmp, 'mockbin')
os.mkdir(mockbindir)
mockgit = os.path.join(mockbindir, 'git')
writeFile(mockgit, '''\
#!/bin/sh
set -e
# git wrapper for gitosis unit tests
printf '%s' "$GITOSIS_UNITTEST_COOKIE" >"$(dirname "$0")/../cookie"
# strip away my special PATH insert so system git will be found
PATH="${PATH#*:}"
exec git "$@"
''')
os.chmod(mockgit, 0755)
magic_cookie = '%d' % random.randint(1, 100000)
good_path = os.environ['PATH']
try:
os.environ['PATH'] = '%s:%s' % (mockbindir, good_path)
os.environ['GITOSIS_UNITTEST_COOKIE'] = magic_cookie
repository.fast_import(
git_dir=path,
commit_msg='foo initial bar',
committer='Mr. Unit Test <[email protected]>',
files=[
('foo', 'bar\n'),
],
)
finally:
os.environ['PATH'] = good_path
os.environ.pop('GITOSIS_UNITTEST_COOKIE', None)
eq(
sorted(os.listdir(tmp)),
sorted([
'mockbin',
'cookie',
'repo.git',
]),
)
got = readFile(os.path.join(tmp, 'cookie'))
eq(got, magic_cookie)
def test_export_simple():
tmp = maketemp()
git_dir = os.path.join(tmp, 'repo.git')
repository.init(path=git_dir)
repository.fast_import(
git_dir=git_dir,
committer='John Doe <[email protected]>',
commit_msg="""\
Reverse the polarity of the neutron flow.
Frobitz the quux and eschew obfuscation.
""",
files=[
('foo', 'content'),
('bar/quux', 'another'),
],
)
export = os.path.join(tmp, 'export')
repository.export(git_dir=git_dir, path=export)
eq(sorted(os.listdir(export)),
sorted(['foo', 'bar']))
eq(readFile(os.path.join(export, 'foo')), 'content')
eq(os.listdir(os.path.join(export, 'bar')), ['quux'])
eq(readFile(os.path.join(export, 'bar', 'quux')), 'another')
child = subprocess.Popen(
args=[
'git',
'--git-dir=%s' % git_dir,
'cat-file',
'commit',
'HEAD',
],
cwd=git_dir,
stdout=subprocess.PIPE,
close_fds=True,
)
got = child.stdout.read().splitlines()
returncode = child.wait()
if returncode != 0:
raise RuntimeError('git exit status %d' % returncode)
eq(got[0].split(None, 1)[0], 'tree')
eq(got[1].rsplit(None, 2)[0],
'author John Doe <[email protected]>')
eq(got[2].rsplit(None, 2)[0],
'committer John Doe <[email protected]>')
eq(got[3], '')
eq(got[4], 'Reverse the polarity of the neutron flow.')
eq(got[5], '')
eq(got[6], 'Frobitz the quux and eschew obfuscation.')
eq(got[7:], [])
def test_export_environment():
tmp = maketemp()
git_dir = os.path.join(tmp, 'repo.git')
mockbindir = os.path.join(tmp, 'mockbin')
os.mkdir(mockbindir)
mockgit = os.path.join(mockbindir, 'git')
writeFile(mockgit, '''\
#!/bin/sh
set -e
# git wrapper for gitosis unit tests
printf '%s\n' "$GITOSIS_UNITTEST_COOKIE" >>"$(dirname "$0")/../cookie"
# strip away my special PATH insert so system git will be found
PATH="${PATH#*:}"
exec git "$@"
''')
os.chmod(mockgit, 0755)
repository.init(path=git_dir)
repository.fast_import(
git_dir=git_dir,
committer='John Doe <[email protected]>',
commit_msg="""\
Reverse the polarity of the neutron flow.
Frobitz the quux and eschew obfuscation.
""",
files=[
('foo', 'content'),
('bar/quux', 'another'),
],
)
export = os.path.join(tmp, 'export')
magic_cookie = '%d' % random.randint(1, 100000)
good_path = os.environ['PATH']
try:
os.environ['PATH'] = '%s:%s' % (mockbindir, good_path)
os.environ['GITOSIS_UNITTEST_COOKIE'] = magic_cookie
repository.export(git_dir=git_dir, path=export)
finally:
os.environ['PATH'] = good_path
os.environ.pop('GITOSIS_UNITTEST_COOKIE', None)
got = readFile(os.path.join(tmp, 'cookie'))
eq(
got,
# export runs git twice
'%s\n%s\n' % (magic_cookie, magic_cookie),
)
def test_has_initial_commit_fail_notAGitDir():
tmp = maketemp()
e = assert_raises(
repository.GitRevParseError,
repository.has_initial_commit,
git_dir=tmp)
eq(str(e), 'rev-parse failed: exit status 128')
def test_has_initial_commit_no():
tmp = maketemp()
repository.init(path=tmp)
got = repository.has_initial_commit(git_dir=tmp)
eq(got, False)
def test_has_initial_commit_yes():
tmp = maketemp()
repository.init(path=tmp)
repository.fast_import(
git_dir=tmp,
commit_msg='fakecommit',
committer='John Doe <[email protected]>',
files=[],
)
got = repository.has_initial_commit(git_dir=tmp)
eq(got, True)
def test_has_initial_commit_environment():
tmp = maketemp()
git_dir = os.path.join(tmp, 'repo.git')
mockbindir = os.path.join(tmp, 'mockbin')
os.mkdir(mockbindir)
mockgit = os.path.join(mockbindir, 'git')
writeFile(mockgit, '''\
#!/bin/sh
set -e
# git wrapper for gitosis unit tests
printf '%s' "$GITOSIS_UNITTEST_COOKIE" >"$(dirname "$0")/../cookie"
# strip away my special PATH insert so system git will be found
PATH="${PATH#*:}"
exec git "$@"
''')
os.chmod(mockgit, 0755)
repository.init(path=tmp)
repository.fast_import(
git_dir=tmp,
commit_msg='fakecommit',
committer='John Doe <[email protected]>',
files=[],
)
magic_cookie = '%d' % random.randint(1, 100000)
good_path = os.environ['PATH']
try:
os.environ['PATH'] = '%s:%s' % (mockbindir, good_path)
os.environ['GITOSIS_UNITTEST_COOKIE'] = magic_cookie
got = repository.has_initial_commit(git_dir=tmp)
finally:
os.environ['PATH'] = good_path
os.environ.pop('GITOSIS_UNITTEST_COOKIE', None)
eq(got, True)
got = readFile(os.path.join(tmp, 'cookie'))
eq(got, magic_cookie)
def test_fast_import_parent():
tmp = maketemp()
path = os.path.join(tmp, 'repo.git')
repository.init(path=path)
repository.fast_import(
git_dir=path,
commit_msg='foo initial bar',
committer='Mr. Unit Test <[email protected]>',
files=[
('foo', 'bar\n'),
],
)
repository.fast_import(
git_dir=path,
commit_msg='another',
committer='Sam One Else <[email protected]>',
parent='refs/heads/master^0',
files=[
('quux', 'thud\n'),
],
)
export = os.path.join(tmp, 'export')
repository.export(
git_dir=path,
path=export,
)
eq(sorted(os.listdir(export)),
sorted(['foo', 'quux']))
def test_mirror():
tmp = maketemp()
main_path = os.path.join(tmp, 'main.git')
mirror_path = os.path.join(tmp, 'mirror.git')
repository.init(path=main_path, template=False)
repository.init(path=mirror_path, template=False)
repository.fast_import(
git_dir=main_path,
commit_msg='foo initial bar',
committer='Mr. Unit Test <[email protected]>',
files=[
('foo', 'bar\n'),
],
)
repository.mirror(main_path, mirror_path)
export = os.path.join(tmp, 'export')
repository.export(
git_dir=mirror_path,
path=export,
)
eq(os.listdir(export),
['foo']) | 10,947 | Python | .py | 351 | 24.820513 | 89 | 0.599735 | res0nat0r/gitosis | 1,323 | 382 | 9 | GPL-2.0 | 9/5/2024, 5:07:13 PM (Europe/Amsterdam) |
541 | test_gitweb.py | res0nat0r_gitosis/gitosis/test/test_gitweb.py | from nose.tools import eq_ as eq
import os
from ConfigParser import RawConfigParser
from cStringIO import StringIO
from gitosis import gitweb
from gitosis.test.util import mkdir, maketemp, readFile, writeFile
def test_projectsList_empty():
cfg = RawConfigParser()
got = StringIO()
gitweb.generate_project_list_fp(
config=cfg,
fp=got)
eq(got.getvalue(), '''\
''')
def test_projectsList_repoDenied():
cfg = RawConfigParser()
cfg.add_section('repo foo/bar')
got = StringIO()
gitweb.generate_project_list_fp(
config=cfg,
fp=got)
eq(got.getvalue(), '''\
''')
def test_projectsList_noOwner():
cfg = RawConfigParser()
cfg.add_section('repo foo/bar')
cfg.set('repo foo/bar', 'gitweb', 'yes')
got = StringIO()
gitweb.generate_project_list_fp(
config=cfg,
fp=got)
eq(got.getvalue(), '''\
foo%2Fbar
''')
def test_projectsList_haveOwner():
cfg = RawConfigParser()
cfg.add_section('repo foo/bar')
cfg.set('repo foo/bar', 'gitweb', 'yes')
cfg.set('repo foo/bar', 'owner', 'John Doe')
got = StringIO()
gitweb.generate_project_list_fp(
config=cfg,
fp=got)
eq(got.getvalue(), '''\
foo%2Fbar John+Doe
''')
def test_projectsList_haveOwnerName():
cfg = RawConfigParser()
cfg.add_section('repo foo/bar')
cfg.set('repo foo/bar', 'gitweb', 'yes')
cfg.set('repo foo/bar', 'owner', 'jdoe')
cfg.add_section('user jdoe')
cfg.set('user jdoe', 'name', 'John Doe')
got = StringIO()
gitweb.generate_project_list_fp(
config=cfg,
fp=got)
eq(got.getvalue(), '''\
foo%2Fbar John+Doe jdoe
''')
def test_projectsList_multiple():
cfg = RawConfigParser()
cfg.add_section('gitosis')
cfg.add_section('repo foo/bar')
cfg.set('repo foo/bar', 'owner', 'John Doe')
cfg.set('repo foo/bar', 'gitweb', 'yes')
cfg.add_section('repo quux')
cfg.set('repo quux', 'gitweb', 'yes')
got = StringIO()
gitweb.generate_project_list_fp(
config=cfg,
fp=got)
eq(got.getvalue(), '''\
quux
foo%2Fbar John+Doe
''')
def test_projectsList_multiple_globalGitwebYes():
cfg = RawConfigParser()
cfg.add_section('gitosis')
cfg.set('gitosis', 'gitweb', 'yes')
cfg.add_section('repo foo/bar')
cfg.set('repo foo/bar', 'owner', 'John Doe')
cfg.add_section('repo quux')
# same as default, no effect
cfg.set('repo quux', 'gitweb', 'yes')
cfg.add_section('repo thud')
# this is still hidden
cfg.set('repo thud', 'gitweb', 'no')
got = StringIO()
gitweb.generate_project_list_fp(
config=cfg,
fp=got)
eq(got.getvalue(), '''\
quux
foo%2Fbar John+Doe
''')
def test_projectsList_reallyEndsWithGit():
tmp = maketemp()
path = os.path.join(tmp, 'foo.git')
mkdir(path)
cfg = RawConfigParser()
cfg.add_section('gitosis')
cfg.set('gitosis', 'repositories', tmp)
cfg.add_section('repo foo')
cfg.set('repo foo', 'gitweb', 'yes')
got = StringIO()
gitweb.generate_project_list_fp(
config=cfg,
fp=got)
eq(got.getvalue(), '''\
foo.git
''')
def test_projectsList_path():
tmp = maketemp()
path = os.path.join(tmp, 'foo.git')
mkdir(path)
cfg = RawConfigParser()
cfg.add_section('gitosis')
cfg.set('gitosis', 'repositories', tmp)
cfg.add_section('repo foo')
cfg.set('repo foo', 'gitweb', 'yes')
projects_list = os.path.join(tmp, 'projects.list')
gitweb.generate_project_list(
config=cfg,
path=projects_list)
got = readFile(projects_list)
eq(got, '''\
foo.git
''')
def test_description_none():
tmp = maketemp()
path = os.path.join(tmp, 'foo.git')
mkdir(path)
cfg = RawConfigParser()
cfg.add_section('gitosis')
cfg.set('gitosis', 'repositories', tmp)
cfg.add_section('repo foo')
cfg.set('repo foo', 'description', 'foodesc')
gitweb.set_descriptions(
config=cfg,
)
got = readFile(os.path.join(path, 'description'))
eq(got, 'foodesc\n')
def test_description_repo_missing():
# configured but not created yet; before first push
tmp = maketemp()
path = os.path.join(tmp, 'foo.git')
cfg = RawConfigParser()
cfg.add_section('gitosis')
cfg.set('gitosis', 'repositories', tmp)
cfg.add_section('repo foo')
cfg.set('repo foo', 'description', 'foodesc')
gitweb.set_descriptions(
config=cfg,
)
assert not os.path.exists(os.path.join(tmp, 'foo'))
assert not os.path.exists(os.path.join(tmp, 'foo.git'))
def test_description_repo_missing_parent():
# configured but not created yet; before first push
tmp = maketemp()
path = os.path.join(tmp, 'foo/bar.git')
cfg = RawConfigParser()
cfg.add_section('gitosis')
cfg.set('gitosis', 'repositories', tmp)
cfg.add_section('repo foo')
cfg.set('repo foo', 'description', 'foodesc')
gitweb.set_descriptions(
config=cfg,
)
assert not os.path.exists(os.path.join(tmp, 'foo'))
def test_description_default():
tmp = maketemp()
path = os.path.join(tmp, 'foo.git')
mkdir(path)
writeFile(
os.path.join(path, 'description'),
'Unnamed repository; edit this file to name it for gitweb.\n',
)
cfg = RawConfigParser()
cfg.add_section('gitosis')
cfg.set('gitosis', 'repositories', tmp)
cfg.add_section('repo foo')
cfg.set('repo foo', 'description', 'foodesc')
gitweb.set_descriptions(
config=cfg,
)
got = readFile(os.path.join(path, 'description'))
eq(got, 'foodesc\n')
def test_description_not_set():
tmp = maketemp()
path = os.path.join(tmp, 'foo.git')
mkdir(path)
writeFile(
os.path.join(path, 'description'),
'i was here first\n',
)
cfg = RawConfigParser()
cfg.add_section('gitosis')
cfg.set('gitosis', 'repositories', tmp)
cfg.add_section('repo foo')
gitweb.set_descriptions(
config=cfg,
)
got = readFile(os.path.join(path, 'description'))
eq(got, 'i was here first\n')
def test_description_again():
tmp = maketemp()
path = os.path.join(tmp, 'foo.git')
mkdir(path)
cfg = RawConfigParser()
cfg.add_section('gitosis')
cfg.set('gitosis', 'repositories', tmp)
cfg.add_section('repo foo')
cfg.set('repo foo', 'description', 'foodesc')
gitweb.set_descriptions(
config=cfg,
)
gitweb.set_descriptions(
config=cfg,
)
got = readFile(os.path.join(path, 'description'))
eq(got, 'foodesc\n')
| 6,594 | Python | .py | 222 | 24.644144 | 70 | 0.634146 | res0nat0r/gitosis | 1,323 | 382 | 9 | GPL-2.0 | 9/5/2024, 5:07:13 PM (Europe/Amsterdam) |
542 | util.py | res0nat0r_gitosis/gitosis/test/util.py | from nose.tools import eq_ as eq
import errno
import os
import shutil
import stat
import sys
def mkdir(*a, **kw):
try:
os.mkdir(*a, **kw)
except OSError, e:
if e.errno == errno.EEXIST:
pass
else:
raise
def maketemp():
tmp = os.path.join(os.path.dirname(__file__), 'tmp')
mkdir(tmp)
caller = sys._getframe(1)
name = '%s.%s' % (
        caller.f_globals['__name__'],
caller.f_code.co_name,
)
tmp = os.path.join(tmp, name)
try:
shutil.rmtree(tmp)
except OSError, e:
if e.errno == errno.ENOENT:
pass
else:
raise
os.mkdir(tmp)
return tmp
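# Usage sketch (illustrative only, not part of the original module): because
# maketemp() names the scratch directory after the *calling* function via the
# caller's stack frame, every test gets a stable, unique path:
#
#     def test_something():            # in gitosis/test/test_foo.py
#         tmp = maketemp()
#         # -> .../gitosis/test/tmp/gitosis.test.test_foo.test_something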
def writeFile(path, content):
tmp = '%s.tmp' % path
f = file(tmp, 'w')
try:
f.write(content)
finally:
f.close()
os.rename(tmp, path)
def readFile(path):
f = file(path)
try:
data = f.read()
finally:
f.close()
return data
def assert_raises(excClass, callableObj, *args, **kwargs):
"""
Like unittest.TestCase.assertRaises, but returns the exception.
"""
try:
callableObj(*args, **kwargs)
except excClass, e:
return e
else:
        if hasattr(excClass, '__name__'):
            excName = excClass.__name__
        else:
            excName = str(excClass)
raise AssertionError("%s not raised" % excName)
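# Usage sketch (illustrative): unlike assertRaises, the exception object is
# returned, so tests can inspect its message:
#
#     e = assert_raises(ValueError, int, 'bogus')
#     assert 'invalid literal' in str(e)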
def check_mode(path, mode, is_file=None, is_dir=None):
st = os.stat(path)
if is_dir:
assert stat.S_ISDIR(st.st_mode)
if is_file:
assert stat.S_ISREG(st.st_mode)
got = stat.S_IMODE(st.st_mode)
eq(got, mode, 'File mode %04o!=%04o for %s' % (got, mode, path))
| 1,692 | Python | .py | 67 | 19.164179 | 68 | 0.575495 | res0nat0r/gitosis | 1,323 | 382 | 9 | GPL-2.0 | 9/5/2024, 5:07:13 PM (Europe/Amsterdam) |
543 | test_run_hook.py | res0nat0r_gitosis/gitosis/test/test_run_hook.py | from nose.tools import eq_ as eq
import os
from ConfigParser import RawConfigParser
from cStringIO import StringIO
from gitosis import init, repository, run_hook
from gitosis.test.util import maketemp, readFile
def test_post_update_simple():
tmp = maketemp()
repos = os.path.join(tmp, 'repositories')
os.mkdir(repos)
admin_repository = os.path.join(repos, 'gitosis-admin.git')
pubkey = (
'ssh-somealgo '
+'0123456789ABCDEFAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA'
+'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA'
+'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA'
+'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA= fakeuser@fakehost')
user = 'theadmin'
init.init_admin_repository(
git_dir=admin_repository,
pubkey=pubkey,
user=user,
)
repository.init(path=os.path.join(repos, 'forweb.git'))
repository.init(path=os.path.join(repos, 'fordaemon.git'))
repository.fast_import(
git_dir=admin_repository,
committer='John Doe <[email protected]>',
commit_msg="""\
stuff
""",
parent='refs/heads/master^0',
files=[
('gitosis.conf', """\
[gitosis]
[group gitosis-admin]
members = theadmin
writable = gitosis-admin
[repo fordaemon]
daemon = yes
[repo forweb]
gitweb = yes
owner = John Doe
description = blah blah
"""),
('keydir/jdoe.pub',
'ssh-somealgo '
+'0123456789ABCDEFBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB'
+'BBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB'
+'BBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB'
+'BBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB= [email protected]'),
],
)
cfg = RawConfigParser()
cfg.add_section('gitosis')
cfg.set('gitosis', 'repositories', repos)
generated = os.path.join(tmp, 'generated')
os.mkdir(generated)
cfg.set('gitosis', 'generate-files-in', generated)
ssh = os.path.join(tmp, 'ssh')
os.mkdir(ssh)
cfg.set(
'gitosis',
'ssh-authorized-keys-path',
os.path.join(ssh, 'authorized_keys'),
)
run_hook.post_update(
cfg=cfg,
git_dir=admin_repository,
)
got = readFile(os.path.join(repos, 'forweb.git', 'description'))
eq(got, 'blah blah\n')
got = os.listdir(generated)
eq(got, ['projects.list'])
got = readFile(os.path.join(generated, 'projects.list'))
eq(
got,
"""\
forweb.git John+Doe
""",
)
got = os.listdir(os.path.join(repos, 'fordaemon.git'))
assert 'git-daemon-export-ok' in got, \
"git-daemon-export-ok not created: %r" % got
got = os.listdir(ssh)
eq(got, ['authorized_keys'])
got = readFile(os.path.join(ssh, 'authorized_keys')).splitlines(True)
assert 'command="gitosis-serve jdoe",no-port-forwarding,no-X11-forwarding,no-agent-forwarding,no-pty ssh-somealgo 0123456789ABCDEFBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB= [email protected]\n' in got, \
"SSH authorized_keys line for jdoe not found: %r" % got
| 3,299 | Python | .py | 89 | 31 | 369 | 0.693508 | res0nat0r/gitosis | 1,323 | 382 | 9 | GPL-2.0 | 9/5/2024, 5:07:13 PM (Europe/Amsterdam) |
544 | test_ssh.py | res0nat0r_gitosis/gitosis/test/test_ssh.py | from nose.tools import eq_ as eq, assert_raises
import os
from cStringIO import StringIO
from gitosis import ssh
from gitosis.test.util import mkdir, maketemp, writeFile, readFile
def _key(s):
return ''.join(s.split('\n')).strip()
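# _key() only strips the newlines used to wrap the long literals below, e.g.
# (illustrative):
#
#     _key('ssh-rsa AAAA\nBBBB\n') == 'ssh-rsa AAAABBBB'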
KEY_1 = _key("""
ssh-rsa +v5XLsUrLsHOKy7Stob1lHZM17YCCNXplcKfbpIztS2PujyixOaBev1ku6H6ny
gUXfuYVzY+PmfTLviSwD3UETxEkR/jlBURACDQARJdUxpgt9XG2Lbs8bhOjonAPapxrH0o
9O8R0Y6Pm1Vh+H2U0B4UBhPgEframpeJYedijBxBV5aq3yUvHkXpcjM/P0gsKqr036k= j
unk@gunk
""")
KEY_2 = _key("""
ssh-rsa 4BX2TxZoD3Og2zNjHwaMhVEa5/NLnPcw+Z02TDR0IGJrrqXk7YlfR3oz+Wb/Eb
Ctli20SoWY0Ur8kBEF/xR4hRslZ2U8t0PAJhr8cq5mifhok/gAdckmSzjD67QJ68uZbga8
ZwIAo7y/BU7cD3Y9UdVZykG34NiijHZLlCBo/TnobXjFIPXvFbfgQ3y8g+akwocFVcQ= f
roop@snoop
""")
class ReadKeys_Test(object):
def test_empty(self):
tmp = maketemp()
empty = os.path.join(tmp, 'empty')
mkdir(empty)
gen = ssh.readKeys(keydir=empty)
assert_raises(StopIteration, gen.next)
def test_ignore_dot(self):
tmp = maketemp()
keydir = os.path.join(tmp, 'ignore_dot')
mkdir(keydir)
writeFile(os.path.join(keydir, '.jdoe.pub'), KEY_1+'\n')
gen = ssh.readKeys(keydir=keydir)
assert_raises(StopIteration, gen.next)
def test_ignore_nonpub(self):
tmp = maketemp()
keydir = os.path.join(tmp, 'ignore_dot')
mkdir(keydir)
writeFile(os.path.join(keydir, 'jdoe.xub'), KEY_1+'\n')
gen = ssh.readKeys(keydir=keydir)
assert_raises(StopIteration, gen.next)
def test_one(self):
tmp = maketemp()
keydir = os.path.join(tmp, 'one')
mkdir(keydir)
writeFile(os.path.join(keydir, 'jdoe.pub'), KEY_1+'\n')
gen = ssh.readKeys(keydir=keydir)
eq(gen.next(), ('jdoe', KEY_1))
assert_raises(StopIteration, gen.next)
def test_two(self):
tmp = maketemp()
keydir = os.path.join(tmp, 'two')
mkdir(keydir)
writeFile(os.path.join(keydir, 'jdoe.pub'), KEY_1+'\n')
writeFile(os.path.join(keydir, 'wsmith.pub'), KEY_2+'\n')
gen = ssh.readKeys(keydir=keydir)
got = frozenset(gen)
eq(got,
frozenset([
('jdoe', KEY_1),
('wsmith', KEY_2),
]))
    def test_bad_filename(self):
tmp = maketemp()
keydir = os.path.join(tmp, 'keys')
mkdir(keydir)
writeFile(os.path.join(keydir, 'jd"oe.pub'), KEY_1+'\n')
gen = ssh.readKeys(keydir=keydir)
got = frozenset(gen)
eq(got, frozenset([]))
    def test_multiple_lines(self):
tmp = maketemp()
keydir = os.path.join(tmp, 'two')
mkdir(keydir)
writeFile(os.path.join(keydir, 'jdoe.pub'), KEY_1+'\n'+KEY_2+'\n')
gen = ssh.readKeys(keydir=keydir)
got = frozenset(gen)
eq(got,
frozenset([
('jdoe', KEY_1),
('jdoe', KEY_2),
]))
class GenerateAuthorizedKeys_Test(object):
def test_simple(self):
def k():
yield ('jdoe', KEY_1)
yield ('wsmith', KEY_2)
gen = ssh.generateAuthorizedKeys(k())
eq(gen.next(), ssh.COMMENT)
eq(gen.next(), (
'command="gitosis-serve jdoe",no-port-forwarding,no-X11-f'
+'orwarding,no-agent-forwarding,no-pty %s' % KEY_1))
eq(gen.next(), (
'command="gitosis-serve wsmith",no-port-forwarding,no-X11'
+'-forwarding,no-agent-forwarding,no-pty %s' % KEY_2))
assert_raises(StopIteration, gen.next)
class FilterAuthorizedKeys_Test(object):
def run(self, s):
f = StringIO(s)
lines = ssh.filterAuthorizedKeys(f)
got = ''.join(['%s\n' % line for line in lines])
return got
def check_no_change(self, s):
got = self.run(s)
eq(got, s)
def test_notFiltered_comment(self):
self.check_no_change('#comment\n')
def test_notFiltered_junk(self):
self.check_no_change('junk\n')
def test_notFiltered_key(self):
self.check_no_change('%s\n' % KEY_1)
def test_notFiltered_keyWithCommand(self):
s = '''\
command="faketosis-serve wsmith",no-port-forwarding,no-X11-forwardin\
g,no-agent-forwarding,no-pty %(key_1)s
''' % dict(key_1=KEY_1)
self.check_no_change(s)
def test_filter_autogeneratedComment_backwardsCompat(self):
got = self.run('### autogenerated by gitosis, DO NOT EDIT\n')
eq(got, '')
def test_filter_autogeneratedComment_current(self):
got = self.run(ssh.COMMENT+'\n')
eq(got, '')
def test_filter_simple(self):
s = '''\
command="gitosis-serve wsmith",no-port-forwarding,no-X11-forwardin\
g,no-agent-forwarding,no-pty %(key_1)s
''' % dict(key_1=KEY_1)
got = self.run(s)
eq(got, '')
def test_filter_withPath(self):
s = '''\
command="/foo/bar/baz/gitosis-serve wsmith",no-port-forwarding,no-X11-forwardin\
g,no-agent-forwarding,no-pty %(key_1)s
''' % dict(key_1=KEY_1)
got = self.run(s)
eq(got, '')
class WriteAuthorizedKeys_Test(object):
def test_simple(self):
tmp = maketemp()
path = os.path.join(tmp, 'authorized_keys')
f = file(path, 'w')
try:
f.write('''\
# foo
bar
### autogenerated by gitosis, DO NOT EDIT
command="/foo/bar/baz/gitosis-serve wsmith",no-port-forwarding,\
no-X11-forwarding,no-agent-forwarding,no-pty %(key_2)s
baz
''' % dict(key_2=KEY_2))
finally:
f.close()
keydir = os.path.join(tmp, 'one')
mkdir(keydir)
writeFile(os.path.join(keydir, 'jdoe.pub'), KEY_1+'\n')
ssh.writeAuthorizedKeys(
path=path, keydir=keydir)
got = readFile(path)
eq(got, '''\
# foo
bar
baz
### autogenerated by gitosis, DO NOT EDIT
command="gitosis-serve jdoe",no-port-forwarding,\
no-X11-forwarding,no-agent-forwarding,no-pty %(key_1)s
''' % dict(key_1=KEY_1))
| 5,956 | Python | .py | 166 | 28.921687 | 80 | 0.623284 | res0nat0r/gitosis | 1,323 | 382 | 9 | GPL-2.0 | 9/5/2024, 5:07:13 PM (Europe/Amsterdam) |
545 | test_gitdaemon.py | res0nat0r_gitosis/gitosis/test/test_gitdaemon.py | from nose.tools import eq_ as eq
import os
from ConfigParser import RawConfigParser
from gitosis import gitdaemon
from gitosis.test.util import maketemp, writeFile
def exported(path):
assert os.path.isdir(path)
p = gitdaemon.export_ok_path(path)
return os.path.exists(p)
def test_git_daemon_export_ok_repo_missing():
# configured but not created yet; before first push
tmp = maketemp()
cfg = RawConfigParser()
cfg.add_section('gitosis')
cfg.set('gitosis', 'repositories', tmp)
cfg.add_section('repo foo')
cfg.set('repo foo', 'daemon', 'yes')
gitdaemon.set_export_ok(config=cfg)
assert not os.path.exists(os.path.join(tmp, 'foo'))
assert not os.path.exists(os.path.join(tmp, 'foo.git'))
def test_git_daemon_export_ok_repo_missing_parent():
# configured but not created yet; before first push
tmp = maketemp()
cfg = RawConfigParser()
cfg.add_section('gitosis')
cfg.set('gitosis', 'repositories', tmp)
cfg.add_section('repo foo/bar')
cfg.set('repo foo/bar', 'daemon', 'yes')
gitdaemon.set_export_ok(config=cfg)
assert not os.path.exists(os.path.join(tmp, 'foo'))
def test_git_daemon_export_ok_allowed():
tmp = maketemp()
path = os.path.join(tmp, 'foo.git')
os.mkdir(path)
cfg = RawConfigParser()
cfg.add_section('gitosis')
cfg.set('gitosis', 'repositories', tmp)
cfg.add_section('repo foo')
cfg.set('repo foo', 'daemon', 'yes')
gitdaemon.set_export_ok(config=cfg)
eq(exported(path), True)
def test_git_daemon_export_ok_allowed_already():
tmp = maketemp()
path = os.path.join(tmp, 'foo.git')
os.mkdir(path)
writeFile(gitdaemon.export_ok_path(path), '')
cfg = RawConfigParser()
cfg.add_section('gitosis')
cfg.set('gitosis', 'repositories', tmp)
cfg.add_section('repo foo')
cfg.set('repo foo', 'daemon', 'yes')
gitdaemon.set_export_ok(config=cfg)
eq(exported(path), True)
def test_git_daemon_export_ok_denied():
tmp = maketemp()
path = os.path.join(tmp, 'foo.git')
os.mkdir(path)
writeFile(gitdaemon.export_ok_path(path), '')
cfg = RawConfigParser()
cfg.add_section('gitosis')
cfg.set('gitosis', 'repositories', tmp)
cfg.add_section('repo foo')
cfg.set('repo foo', 'daemon', 'no')
gitdaemon.set_export_ok(config=cfg)
eq(exported(path), False)
def test_git_daemon_export_ok_denied_already():
tmp = maketemp()
path = os.path.join(tmp, 'foo.git')
os.mkdir(path)
cfg = RawConfigParser()
cfg.add_section('gitosis')
cfg.set('gitosis', 'repositories', tmp)
cfg.add_section('repo foo')
cfg.set('repo foo', 'daemon', 'no')
gitdaemon.set_export_ok(config=cfg)
eq(exported(path), False)
def test_git_daemon_export_ok_subdirs():
tmp = maketemp()
foo = os.path.join(tmp, 'foo')
os.mkdir(foo)
path = os.path.join(foo, 'bar.git')
os.mkdir(path)
cfg = RawConfigParser()
cfg.add_section('gitosis')
cfg.set('gitosis', 'repositories', tmp)
cfg.add_section('repo foo/bar')
cfg.set('repo foo/bar', 'daemon', 'yes')
gitdaemon.set_export_ok(config=cfg)
eq(exported(path), True)
def test_git_daemon_export_ok_denied_default():
tmp = maketemp()
path = os.path.join(tmp, 'foo.git')
os.mkdir(path)
writeFile(gitdaemon.export_ok_path(path), '')
cfg = RawConfigParser()
cfg.add_section('gitosis')
cfg.set('gitosis', 'repositories', tmp)
cfg.add_section('repo foo')
gitdaemon.set_export_ok(config=cfg)
eq(exported(path), False)
def test_git_daemon_export_ok_denied_even_not_configured():
    # repositories not mentioned in the config also get touched: this avoids
    # security trouble, where we might expose (or continue to expose) old
    # repositories that were removed from the config
tmp = maketemp()
path = os.path.join(tmp, 'foo.git')
os.mkdir(path)
writeFile(gitdaemon.export_ok_path(path), '')
cfg = RawConfigParser()
cfg.add_section('gitosis')
cfg.set('gitosis', 'repositories', tmp)
gitdaemon.set_export_ok(config=cfg)
eq(exported(path), False)
def test_git_daemon_export_ok_allowed_global():
tmp = maketemp()
for repo in [
'foo.git',
'quux.git',
'thud.git',
]:
path = os.path.join(tmp, repo)
os.mkdir(path)
# try to provoke an invalid allow
writeFile(gitdaemon.export_ok_path(os.path.join(tmp, 'thud.git')), '')
cfg = RawConfigParser()
cfg.add_section('gitosis')
cfg.set('gitosis', 'repositories', tmp)
cfg.set('gitosis', 'daemon', 'yes')
cfg.add_section('repo foo')
cfg.add_section('repo quux')
# same as default, no effect
cfg.set('repo quux', 'daemon', 'yes')
cfg.add_section('repo thud')
# this is still hidden
cfg.set('repo thud', 'daemon', 'no')
gitdaemon.set_export_ok(config=cfg)
eq(exported(os.path.join(tmp, 'foo.git')), True)
eq(exported(os.path.join(tmp, 'quux.git')), True)
eq(exported(os.path.join(tmp, 'thud.git')), False)
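# Equivalent gitosis.conf sketch for the case above (illustrative): a global
# 'daemon = yes' exports every repository unless a [repo X] section opts out.
#
#     [gitosis]
#     daemon = yes
#     [repo thud]
#     daemon = no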
def test_git_daemon_export_ok_allowed_all():
tmp = maketemp()
for repo in [
'foo.git',
'quux.git',
'thud.git',
]:
path = os.path.join(tmp, repo)
os.mkdir(path)
# try to provoke an invalid allow
writeFile(gitdaemon.export_ok_path(os.path.join(tmp, 'thud.git')), '')
cfg = RawConfigParser()
cfg.add_section('gitosis')
cfg.set('gitosis', 'repositories', tmp)
cfg.set('gitosis', 'daemon-if-all', 'yes')
cfg.add_section('group all')
cfg.set('group all', 'readonly', 'foo')
cfg.add_section('group boo')
cfg.set('group boo', 'members', '@all')
cfg.set('group boo', 'readonly', 'quux thud')
cfg.add_section('repo thud')
# this is still hidden
cfg.set('repo thud', 'daemon', 'no')
gitdaemon.set_export_ok(config=cfg)
eq(exported(os.path.join(tmp, 'foo.git')), True)
eq(exported(os.path.join(tmp, 'quux.git')), True)
eq(exported(os.path.join(tmp, 'thud.git')), False)
| 6,018 | Python | .py | 166 | 31.253012 | 74 | 0.657579 | res0nat0r/gitosis | 1,323 | 382 | 9 | GPL-2.0 | 9/5/2024, 5:07:13 PM (Europe/Amsterdam) |
546 | test_mirror.py | res0nat0r_gitosis/gitosis/test/test_mirror.py | from nose.tools import eq_ as eq, ok_ as ok
from ConfigParser import RawConfigParser
from gitosis import mirror, repository
from gitosis.test.util import maketemp
import os
def get_config():
cfg = RawConfigParser()
cfg.add_section('repo foo')
cfg.set('repo foo', 'mirrors', '/var/bar /var/baz')
return cfg
def test_get_mirrors():
cfg = get_config()
cfg.add_section('mirror github')
cfg.set('mirror github', 'repos', 'foo bar')
cfg.set('mirror github', 'uri', '[email protected]:res0nat0r/%s.git')
mirrors = list(mirror.get_mirrors(cfg, 'foo'))
ok('/var/bar' in mirrors)
ok('/var/baz' in mirrors)
ok('[email protected]:res0nat0r/foo.git' in mirrors)
eq(3, len(mirrors))
mirrors = list(mirror.get_mirrors(cfg, 'bar'))
ok('[email protected]:res0nat0r/bar.git' in mirrors)
eq(1, len(mirrors))
def test_get_mirrors_with_all():
cfg = get_config()
mirrors = list(mirror.get_mirrors(cfg, 'baz'))
eq(0, len(mirrors))
cfg.add_section('mirror github')
cfg.set('mirror github', 'repos', '@all')
cfg.set('mirror github', 'uri', '[email protected]:res0nat0r/%s.git')
mirrors = list(mirror.get_mirrors(cfg, 'baz'))
ok('[email protected]:res0nat0r/baz.git' in mirrors)
eq(1, len(mirrors))
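# Equivalent gitosis.conf sketch for the case above (illustrative): '@all' in
# a [mirror] section pushes every repository to the templated URI.
#
#     [mirror github]
#     repos = @all
#     uri = [email protected]:res0nat0r/%s.git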
def test_get_git_name():
eq('foo', mirror.get_git_name('/home/git/repository', '/home/git/repository/foo.git'))
def test_push_mirrors():
tmp = maketemp()
foo_path = os.path.join(tmp, 'foo.git')
bar_path = os.path.join(tmp, 'bar.git')
baz_path = os.path.join(tmp, 'baz.git')
repository.init(path=foo_path, template=False)
repository.init(path=bar_path, template=False)
repository.init(path=baz_path, template=False)
repository.fast_import(
git_dir=foo_path,
commit_msg='foo initial bar',
committer='Mr. Unit Test <[email protected]>',
files=[
('foo', 'bar\n'),
],
)
cfg = get_config()
    cfg.set('repo foo', 'mirrors', ' '.join([bar_path, baz_path]))
mirror.push_mirrors(cfg, foo_path)
export_bar = os.path.join(tmp, 'export_bar')
export_baz = os.path.join(tmp, 'export_baz')
repository.export(
git_dir=bar_path,
path=export_bar,
)
repository.export(
git_dir=baz_path,
path=export_baz,
)
eq(os.listdir(export_bar),
['foo'])
eq(os.listdir(export_baz),
['foo']) | 2,439 | Python | .py | 68 | 30.132353 | 90 | 0.638688 | res0nat0r/gitosis | 1,323 | 382 | 9 | GPL-2.0 | 9/5/2024, 5:07:13 PM (Europe/Amsterdam) |
547 | test_access.py | res0nat0r_gitosis/gitosis/test/test_access.py | from nose.tools import eq_ as eq
import logging
from ConfigParser import RawConfigParser
from gitosis import access
def test_write_no_simple():
cfg = RawConfigParser()
eq(access.haveAccess(config=cfg, user='jdoe', mode='writable', path='foo/bar'),
None)
def test_write_yes_simple():
cfg = RawConfigParser()
cfg.add_section('group fooers')
cfg.set('group fooers', 'members', 'jdoe')
cfg.set('group fooers', 'writable', 'foo/bar')
eq(access.haveAccess(config=cfg, user='jdoe', mode='writable', path='foo/bar'),
('repositories', 'foo/bar'))
def test_write_no_simple_wouldHaveReadonly():
cfg = RawConfigParser()
cfg.add_section('group fooers')
cfg.set('group fooers', 'members', 'jdoe')
cfg.set('group fooers', 'readonly', 'foo/bar')
eq(access.haveAccess(config=cfg, user='jdoe', mode='writable', path='foo/bar'),
None)
def test_write_yes_map():
cfg = RawConfigParser()
cfg.add_section('group fooers')
cfg.set('group fooers', 'members', 'jdoe')
cfg.set('group fooers', 'map writable foo/bar', 'quux/thud')
eq(access.haveAccess(config=cfg, user='jdoe', mode='writable', path='foo/bar'),
('repositories', 'quux/thud'))
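# Equivalent gitosis.conf sketch for the case above (illustrative): a
# 'map <mode> <requested-path>' option rewrites the requested repository
# to a different on-disk path.
#
#     [group fooers]
#     members = jdoe
#     map writable foo/bar = quux/thud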
def test_write_no_map_wouldHaveReadonly():
cfg = RawConfigParser()
cfg.add_section('group fooers')
cfg.set('group fooers', 'members', 'jdoe')
cfg.set('group fooers', 'map readonly foo/bar', 'quux/thud')
eq(access.haveAccess(config=cfg, user='jdoe', mode='writable', path='foo/bar'),
None)
def test_read_no_simple():
cfg = RawConfigParser()
eq(access.haveAccess(config=cfg, user='jdoe', mode='readonly', path='foo/bar'),
None)
def test_read_yes_simple():
cfg = RawConfigParser()
cfg.add_section('group fooers')
cfg.set('group fooers', 'members', 'jdoe')
cfg.set('group fooers', 'readonly', 'foo/bar')
eq(access.haveAccess(config=cfg, user='jdoe', mode='readonly', path='foo/bar'),
('repositories', 'foo/bar'))
def test_read_yes_simple_wouldHaveWritable():
cfg = RawConfigParser()
cfg.add_section('group fooers')
cfg.set('group fooers', 'members', 'jdoe')
cfg.set('group fooers', 'writable', 'foo/bar')
eq(access.haveAccess(config=cfg, user='jdoe', mode='readonly', path='foo/bar'),
None)
def test_read_yes_map():
cfg = RawConfigParser()
cfg.add_section('group fooers')
cfg.set('group fooers', 'members', 'jdoe')
cfg.set('group fooers', 'map readonly foo/bar', 'quux/thud')
eq(access.haveAccess(config=cfg, user='jdoe', mode='readonly', path='foo/bar'),
('repositories', 'quux/thud'))
def test_read_yes_map_wouldHaveWritable():
cfg = RawConfigParser()
cfg.add_section('group fooers')
cfg.set('group fooers', 'members', 'jdoe')
cfg.set('group fooers', 'map writable foo/bar', 'quux/thud')
eq(access.haveAccess(config=cfg, user='jdoe', mode='readonly', path='foo/bar'),
None)
def test_read_yes_all():
cfg = RawConfigParser()
cfg.add_section('group fooers')
cfg.set('group fooers', 'members', '@all')
cfg.set('group fooers', 'readonly', 'foo/bar')
eq(access.haveAccess(config=cfg, user='jdoe', mode='readonly', path='foo/bar'),
('repositories', 'foo/bar'))
def test_base_global_absolute():
cfg = RawConfigParser()
cfg.add_section('gitosis')
cfg.set('gitosis', 'repositories', '/a/leading/path')
cfg.add_section('group fooers')
cfg.set('group fooers', 'members', 'jdoe')
cfg.set('group fooers', 'map writable foo/bar', 'baz/quux/thud')
eq(access.haveAccess(
config=cfg, user='jdoe', mode='writable', path='foo/bar'),
('/a/leading/path', 'baz/quux/thud'))
def test_base_global_relative():
cfg = RawConfigParser()
cfg.add_section('gitosis')
cfg.set('gitosis', 'repositories', 'some/relative/path')
cfg.add_section('group fooers')
cfg.set('group fooers', 'members', 'jdoe')
cfg.set('group fooers', 'map writable foo/bar', 'baz/quux/thud')
eq(access.haveAccess(
config=cfg, user='jdoe', mode='writable', path='foo/bar'),
('some/relative/path', 'baz/quux/thud'))
def test_base_global_relative_simple():
cfg = RawConfigParser()
cfg.add_section('gitosis')
cfg.set('gitosis', 'repositories', 'some/relative/path')
cfg.add_section('group fooers')
cfg.set('group fooers', 'members', 'jdoe')
cfg.set('group fooers', 'readonly', 'foo xyzzy bar')
eq(access.haveAccess(
config=cfg, user='jdoe', mode='readonly', path='xyzzy'),
('some/relative/path', 'xyzzy'))
def test_base_global_unset():
cfg = RawConfigParser()
cfg.add_section('gitosis')
cfg.add_section('group fooers')
cfg.set('group fooers', 'members', 'jdoe')
cfg.set('group fooers', 'readonly', 'foo xyzzy bar')
eq(access.haveAccess(
config=cfg, user='jdoe', mode='readonly', path='xyzzy'),
('repositories', 'xyzzy'))
def test_user():
cfg = RawConfigParser()
cfg.add_section('user jdoe')
cfg.set('user jdoe', 'readonly', 'foo xyzzy bar')
eq(access.haveAccess(
config=cfg, user='jdoe', mode='readonly', path='xyzzy'),
('repositories', 'xyzzy'))
def test_base_local():
cfg = RawConfigParser()
cfg.add_section('group fooers')
cfg.set('group fooers', 'repositories', 'some/relative/path')
cfg.set('group fooers', 'members', 'jdoe')
cfg.set('group fooers', 'map writable foo/bar', 'baz/quux/thud')
eq(access.haveAccess(
config=cfg, user='jdoe', mode='writable', path='foo/bar'),
('some/relative/path', 'baz/quux/thud'))
def test_dotgit():
# a .git extension is always allowed to be added
cfg = RawConfigParser()
cfg.add_section('group fooers')
cfg.set('group fooers', 'members', 'jdoe')
cfg.set('group fooers', 'writable', 'foo/bar')
eq(access.haveAccess(config=cfg, user='jdoe', mode='writable', path='foo/bar.git'),
('repositories', 'foo/bar'))
| 5,954 | Python | .py | 138 | 38.072464 | 87 | 0.657005 | res0nat0r/gitosis | 1,323 | 382 | 9 | GPL-2.0 | 9/5/2024, 5:07:13 PM (Europe/Amsterdam) |
548 | test_init.py | res0nat0r_gitosis/gitosis/test/test_init.py | from nose.tools import eq_ as eq
from gitosis.test.util import assert_raises, maketemp
import os
from ConfigParser import RawConfigParser
from gitosis import init
from gitosis import repository
from gitosis.test import util
def test_ssh_extract_user_simple():
got = init.ssh_extract_user(
'ssh-somealgo '
+'0123456789ABCDEFAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA'
+'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA'
+'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA'
+'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA= fakeuser@fakehost')
eq(got, 'fakeuser@fakehost')
def test_ssh_extract_user_domain():
got = init.ssh_extract_user(
'ssh-somealgo '
+'0123456789ABCDEFAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA'
+'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA'
+'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA'
+'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA= [email protected]')
eq(got, '[email protected]')
def test_ssh_extract_user_domain_dashes():
got = init.ssh_extract_user(
'ssh-somealgo '
+'0123456789ABCDEFAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA'
+'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA'
+'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA'
+'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA= [email protected]')
eq(got, '[email protected]')
def test_ssh_extract_user_underscore():
got = init.ssh_extract_user(
'ssh-somealgo '
+'0123456789ABCDEFAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA'
+'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA'
+'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA'
+'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA= [email protected]')
eq(got, '[email protected]')
def test_ssh_extract_user_dot():
got = init.ssh_extract_user(
'ssh-somealgo '
+'0123456789ABCDEFAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA'
+'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA'
+'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA'
+'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA= [email protected]')
eq(got, '[email protected]')
def test_ssh_extract_user_dash():
got = init.ssh_extract_user(
'ssh-somealgo '
+'0123456789ABCDEFAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA'
+'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA'
+'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA'
+'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA= [email protected]')
eq(got, '[email protected]')
def test_ssh_extract_user_no_at():
got = init.ssh_extract_user(
'ssh-somealgo '
+'0123456789ABCDEFAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA'
+'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA'
+'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA'
+'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA= fakeuser')
eq(got, 'fakeuser')
def test_ssh_extract_user_caps():
got = init.ssh_extract_user(
'ssh-somealgo '
+'0123456789ABCDEFAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA'
+'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA'
+'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA'
+'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA= [email protected]')
eq(got, '[email protected]')
def test_ssh_extract_user_bad():
e = assert_raises(
init.InsecureSSHKeyUsername,
init.ssh_extract_user,
'ssh-somealgo AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA'
+'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA'
+'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA'
+'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA= ER3%#@e%')
eq(str(e), "Username contains not allowed characters: 'ER3%#@e%'")
def test_init_admin_repository():
tmp = maketemp()
admin_repository = os.path.join(tmp, 'admin.git')
pubkey = (
'ssh-somealgo '
+'0123456789ABCDEFAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA'
+'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA'
+'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA'
+'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA= fakeuser@fakehost')
user = 'jdoe'
init.init_admin_repository(
git_dir=admin_repository,
pubkey=pubkey,
user=user,
)
eq(os.listdir(tmp), ['admin.git'])
hook = os.path.join(
tmp,
'admin.git',
'hooks',
'post-update',
)
util.check_mode(hook, 0755, is_file=True)
got = util.readFile(hook).splitlines()
assert 'gitosis-run-hook post-update' in got
export_dir = os.path.join(tmp, 'export')
repository.export(git_dir=admin_repository,
path=export_dir)
eq(sorted(os.listdir(export_dir)),
sorted(['gitosis.conf', 'keydir']))
eq(os.listdir(os.path.join(export_dir, 'keydir')),
['jdoe.pub'])
got = util.readFile(
os.path.join(export_dir, 'keydir', 'jdoe.pub'))
eq(got, pubkey)
# the only thing guaranteed of initial config file ordering is
# that [gitosis] is first
got = util.readFile(os.path.join(export_dir, 'gitosis.conf'))
got = got.splitlines()[0]
eq(got, '[gitosis]')
cfg = RawConfigParser()
cfg.read(os.path.join(export_dir, 'gitosis.conf'))
eq(sorted(cfg.sections()),
sorted([
'gitosis',
'group gitosis-admin',
]))
eq(cfg.items('gitosis'), [])
eq(sorted(cfg.items('group gitosis-admin')),
sorted([
('writable', 'gitosis-admin'),
('members', 'jdoe'),
]))
| 5,907 | Python | .py | 133 | 37.586466 | 86 | 0.728693 | res0nat0r/gitosis | 1,323 | 382 | 9 | GPL-2.0 | 9/5/2024, 5:07:13 PM (Europe/Amsterdam) |
549 | test_group.py | res0nat0r_gitosis/gitosis/test/test_group.py | from nose.tools import eq_ as eq, assert_raises
from ConfigParser import RawConfigParser
from gitosis import group
def test_no_emptyConfig():
cfg = RawConfigParser()
gen = group.getMembership(config=cfg, user='jdoe')
eq(gen.next(), 'all')
assert_raises(StopIteration, gen.next)
def test_no_emptyGroup():
cfg = RawConfigParser()
cfg.add_section('group hackers')
gen = group.getMembership(config=cfg, user='jdoe')
eq(gen.next(), 'all')
assert_raises(StopIteration, gen.next)
def test_no_notListed():
cfg = RawConfigParser()
cfg.add_section('group hackers')
cfg.set('group hackers', 'members', 'wsmith')
gen = group.getMembership(config=cfg, user='jdoe')
eq(gen.next(), 'all')
assert_raises(StopIteration, gen.next)
def test_yes_simple():
cfg = RawConfigParser()
cfg.add_section('group hackers')
cfg.set('group hackers', 'members', 'jdoe')
gen = group.getMembership(config=cfg, user='jdoe')
eq(gen.next(), 'hackers')
eq(gen.next(), 'all')
assert_raises(StopIteration, gen.next)
def test_yes_leading():
cfg = RawConfigParser()
cfg.add_section('group hackers')
cfg.set('group hackers', 'members', 'jdoe wsmith')
gen = group.getMembership(config=cfg, user='jdoe')
eq(gen.next(), 'hackers')
eq(gen.next(), 'all')
assert_raises(StopIteration, gen.next)
def test_yes_trailing():
cfg = RawConfigParser()
cfg.add_section('group hackers')
cfg.set('group hackers', 'members', 'wsmith jdoe')
gen = group.getMembership(config=cfg, user='jdoe')
eq(gen.next(), 'hackers')
eq(gen.next(), 'all')
assert_raises(StopIteration, gen.next)
def test_yes_middle():
cfg = RawConfigParser()
cfg.add_section('group hackers')
cfg.set('group hackers', 'members', 'wsmith jdoe danny')
gen = group.getMembership(config=cfg, user='jdoe')
eq(gen.next(), 'hackers')
eq(gen.next(), 'all')
assert_raises(StopIteration, gen.next)
def test_yes_recurse_one():
cfg = RawConfigParser()
cfg.add_section('group hackers')
cfg.set('group hackers', 'members', 'wsmith @smackers')
cfg.add_section('group smackers')
cfg.set('group smackers', 'members', 'danny jdoe')
gen = group.getMembership(config=cfg, user='jdoe')
eq(gen.next(), 'smackers')
eq(gen.next(), 'hackers')
eq(gen.next(), 'all')
assert_raises(StopIteration, gen.next)
def test_yes_recurse_one_ordering():
cfg = RawConfigParser()
cfg.add_section('group smackers')
cfg.set('group smackers', 'members', 'danny jdoe')
cfg.add_section('group hackers')
cfg.set('group hackers', 'members', 'wsmith @smackers')
gen = group.getMembership(config=cfg, user='jdoe')
eq(gen.next(), 'smackers')
eq(gen.next(), 'hackers')
eq(gen.next(), 'all')
assert_raises(StopIteration, gen.next)
def test_yes_recurse_three():
cfg = RawConfigParser()
cfg.add_section('group hackers')
cfg.set('group hackers', 'members', 'wsmith @smackers')
cfg.add_section('group smackers')
cfg.set('group smackers', 'members', 'danny @snackers')
cfg.add_section('group snackers')
cfg.set('group snackers', 'members', '@whackers foo')
cfg.add_section('group whackers')
cfg.set('group whackers', 'members', 'jdoe')
gen = group.getMembership(config=cfg, user='jdoe')
eq(gen.next(), 'whackers')
eq(gen.next(), 'snackers')
eq(gen.next(), 'smackers')
eq(gen.next(), 'hackers')
eq(gen.next(), 'all')
assert_raises(StopIteration, gen.next)
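# Equivalent gitosis.conf sketch for the nesting above (illustrative):
#
#     [group hackers]
#     members = wsmith @smackers
#     [group smackers]
#     members = danny @snackers
#     [group snackers]
#     members = @whackers foo
#     [group whackers]
#     members = jdoe
#
# getMembership() yields the innermost matching group first and always ends
# with the implicit 'all' group.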
def test_yes_recurse_junk():
cfg = RawConfigParser()
cfg.add_section('group hackers')
cfg.set('group hackers', 'members', '@notexist @smackers')
cfg.add_section('group smackers')
cfg.set('group smackers', 'members', 'jdoe')
gen = group.getMembership(config=cfg, user='jdoe')
eq(gen.next(), 'smackers')
eq(gen.next(), 'hackers')
eq(gen.next(), 'all')
assert_raises(StopIteration, gen.next)
def test_yes_recurse_loop():
cfg = RawConfigParser()
cfg.add_section('group hackers')
cfg.set('group hackers', 'members', '@smackers')
cfg.add_section('group smackers')
cfg.set('group smackers', 'members', '@hackers jdoe')
gen = group.getMembership(config=cfg, user='jdoe')
eq(gen.next(), 'smackers')
eq(gen.next(), 'hackers')
eq(gen.next(), 'all')
assert_raises(StopIteration, gen.next)
def test_no_recurse_loop():
cfg = RawConfigParser()
cfg.add_section('group hackers')
cfg.set('group hackers', 'members', '@smackers')
cfg.add_section('group smackers')
cfg.set('group smackers', 'members', '@hackers')
gen = group.getMembership(config=cfg, user='jdoe')
eq(gen.next(), 'all')
assert_raises(StopIteration, gen.next)
| 4,750 | Python | .py | 123 | 34.01626 | 62 | 0.670642 | res0nat0r/gitosis | 1,323 | 382 | 9 | GPL-2.0 | 9/5/2024, 5:07:13 PM (Europe/Amsterdam) |
550 | setup.py | paramiko_paramiko/setup.py | # Copyright (C) 2003-2008 Robey Pointer <[email protected]>
#
# This file is part of paramiko.
#
# Paramiko is free software; you can redistribute it and/or modify it under the
# terms of the GNU Lesser General Public License as published by the Free
# Software Foundation; either version 2.1 of the License, or (at your option)
# any later version.
#
# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Paramiko; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Suite 500, Boston, MA 02110-1335 USA.
import sys
from setuptools import setup
if sys.platform == "darwin":
import setup_helper
setup_helper.install_custom_make_tarball()
long_description = open("README.rst").read()
# Version info -- read without importing
_locals = {}
with open("paramiko/_version.py") as fp:
exec(fp.read(), None, _locals)
version = _locals["__version__"]
# Have to build extras_require dynamically because it doesn't allow
# self-referencing and I hate repeating myself.
extras_require = {
"gssapi": [
"pyasn1>=0.1.7",
'gssapi>=1.4.1;platform_system!="Windows"',
'pywin32>=2.1.8;platform_system=="Windows"',
],
"invoke": ["invoke>=2.0"],
# TODO 4.0: remove entrypoint as irrelevant
"ed25519": [],
}
everything = []
for subdeps in extras_require.values():
everything.extend(subdeps)
extras_require["all"] = everything
setup(
name="paramiko",
version=version,
description="SSH2 protocol library",
long_description=long_description,
author="Jeff Forcier",
author_email="[email protected]",
url="https://paramiko.org",
project_urls={
"Docs": "https://docs.paramiko.org",
"Source": "https://github.com/paramiko/paramiko",
"Issues": "https://github.com/paramiko/paramiko/issues",
"Changelog": "https://www.paramiko.org/changelog.html",
"CI": "https://app.circleci.com/pipelines/github/paramiko/paramiko",
},
packages=["paramiko"],
license="LGPL",
platforms="Posix; MacOS X; Windows",
classifiers=[
"Development Status :: 5 - Production/Stable",
"Intended Audience :: Developers",
"License :: OSI Approved :: "
"GNU Library or Lesser General Public License (LGPL)",
"Operating System :: OS Independent",
"Topic :: Internet",
"Topic :: Security :: Cryptography",
"Programming Language :: Python",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3 :: Only",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9",
"Programming Language :: Python :: 3.10",
"Programming Language :: Python :: 3.11",
],
python_requires=">=3.6",
install_requires=[
"bcrypt>=3.2",
"cryptography>=3.3",
"pynacl>=1.5",
],
extras_require=extras_require,
)
| 3,289 | Python | .py | 88 | 32.613636 | 79 | 0.669693 | paramiko/paramiko | 9,017 | 1,999 | 1,074 | LGPL-2.1 | 9/5/2024, 5:07:36 PM (Europe/Amsterdam) |
551 | tasks.py | paramiko_paramiko/tasks.py | import os
from pathlib import Path
from os.path import join
from shutil import rmtree, copytree
from invoke import Collection, task
from invocations import checks
from invocations.docs import docs, www, sites, watch_docs
from invocations.packaging.release import ns as release_coll, publish
from invocations.testing import count_errors
# TODO: this screams out for the invoke missing-feature of "I just wrap task X,
# assume its signature by default" (even if that is just **kwargs support)
@task
def test(
ctx,
verbose=True,
color=True,
capture="sys",
module=None,
k=None,
x=False,
opts="",
coverage=False,
include_slow=False,
loop_on_fail=False,
):
"""
Run unit tests via pytest.
By default, known-slow parts of the suite are SKIPPED unless
``--include-slow`` is given. (Note that ``--include-slow`` does not mesh
well with explicit ``--opts="-m=xxx"`` - if ``-m`` is found in ``--opts``,
``--include-slow`` will be ignored!)
"""
if verbose and "--verbose" not in opts and "-v" not in opts:
opts += " --verbose"
# TODO: forget why invocations.pytest added this; is it to force color when
# running headless? Probably?
if color:
opts += " --color=yes"
opts += " --capture={}".format(capture)
if "-m" not in opts and not include_slow:
opts += " -m 'not slow'"
if k is not None and not ("-k" in opts if opts else False):
opts += " -k {}".format(k)
if x and not ("-x" in opts if opts else False):
opts += " -x"
if loop_on_fail and not ("-f" in opts if opts else False):
opts += " -f"
modstr = ""
if module is not None:
base = f"{module}.py"
tests = Path("tests")
legacy = tests / f"test_{base}"
modstr = str(legacy if legacy.exists() else tests / base)
# Switch runner depending on coverage or no coverage.
# TODO: get pytest's coverage plugin working, IIRC it has issues?
runner = "pytest"
if coverage:
# Leverage how pytest can be run as 'python -m pytest', and then how
# coverage can be told to run things in that manner instead of
# expecting a literal .py file.
runner = "coverage run -m pytest"
# Strip SSH_AUTH_SOCK from parent env to avoid pollution by interactive
# users.
# TODO: once pytest coverage plugin works, see if there's a pytest-native
# way to handle the env stuff too, then we can remove these tasks entirely
# in favor of just "run pytest"?
env = dict(os.environ)
if "SSH_AUTH_SOCK" in env:
del env["SSH_AUTH_SOCK"]
cmd = "{} {} {}".format(runner, opts, modstr)
# NOTE: we have a pytest.ini and tend to use that over PYTEST_ADDOPTS.
ctx.run(cmd, pty=True, env=env, replace_env=True)
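# Example invocations via Invoke's CLI (illustrative sketch; exact flag
# spellings depend on Invoke's auto-generated CLI):
#
#     inv test                           # verbose + color, skips tests marked slow
#     inv test --include-slow            # run the whole suite
#     inv test --module=auth -k timeout  # tests/test_auth.py, pytest -k filter
#     inv coverage                       # slow tests too, under 'coverage run'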
@task
def coverage(ctx, opts=""):
"""
Execute all tests (normal and slow) with coverage enabled.
"""
test(ctx, coverage=True, include_slow=True, opts=opts)
# NOTE: codecov now handled purely in invocations/orb
@task
def guard(ctx, opts=""):
"""
Execute all tests and then watch for changes, re-running.
"""
# TODO if coverage was run via pytest-cov, we could add coverage here too
return test(ctx, include_slow=True, loop_on_fail=True, opts=opts)
# Until we stop bundling docs w/ releases. Need to discover use cases first.
# TODO: would be nice to tie this into our own version of build() too, but
# still have publish() use that build()...really need to try out classes!
# TODO 4.0: I'd like to just axe the 'built docs in sdist', none of my other
# projects do it.
@task
def publish_(
ctx, sdist=True, wheel=True, sign=False, dry_run=False, index=None
):
"""
Wraps invocations.packaging.publish to add baked-in docs folder.
"""
# Build docs first. Use terribad workaround pending invoke #146
ctx.run("inv docs", pty=True, hide=False)
# Move the built docs into where Epydocs used to live
target = "docs"
rmtree(target, ignore_errors=True)
# TODO: make it easier to yank out this config val from the docs coll
copytree("sites/docs/_build", target)
# Publish
publish(
ctx, sdist=sdist, wheel=wheel, sign=sign, dry_run=dry_run, index=index
)
# Also have to hack up the newly enhanced all_() so it uses our publish
@task(name="all", default=True)
def all_(c, dry_run=False):
release_coll["prepare"](c, dry_run=dry_run)
publish_(c, dry_run=dry_run)
release_coll["push"](c, dry_run=dry_run)
# TODO: "replace one task with another" needs a better public API, this is
# using unpublished internals & skips all the stuff add_task() does re:
# aliasing, defaults etc.
release_coll.tasks["publish"] = publish_
release_coll.tasks["all"] = all_
ns = Collection(
test,
coverage,
guard,
release_coll,
docs,
www,
watch_docs,
sites,
count_errors,
checks.blacken,
checks,
)
ns.configure(
{
"packaging": {
# NOTE: many of these are also set in kwarg defaults above; but
# having them here too means once we get rid of our custom
# release(), the behavior stays.
"sign": False,
"wheel": True,
"changelog_file": join(
www.configuration()["sphinx"]["source"], "changelog.rst"
),
},
"docs": {"browse": "remote"},
}
)
| 5,401 | Python | .py | 148 | 31.445946 | 79 | 0.655594 | paramiko/paramiko | 9,017 | 1,999 | 1,074 | LGPL-2.1 | 9/5/2024, 5:07:36 PM (Europe/Amsterdam) |
552 | setup_helper.py | paramiko_paramiko/setup_helper.py | # Copyright (C) 2003-2007 Robey Pointer <[email protected]>
#
# This file is part of paramiko.
#
# Paramiko is free software; you can redistribute it and/or modify it under the
# terms of the GNU Lesser General Public License as published by the Free
# Software Foundation; either version 2.1 of the License, or (at your option)
# any later version.
#
# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Paramiko; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
# Note: Despite the copyright notice, this was submitted by John
# Arbash Meinel. Thanks John!
"""A small set of helper functions for dealing with setup issues"""
import os
import tarfile
from distutils import log
import distutils.archive_util
from distutils.dir_util import mkpath
from distutils.spawn import spawn
try:
from pwd import getpwnam
except ImportError:
getpwnam = None
try:
from grp import getgrnam
except ImportError:
getgrnam = None
def _get_gid(name):
"""Returns a gid, given a group name."""
if getgrnam is None or name is None:
return None
try:
result = getgrnam(name)
except KeyError:
result = None
if result is not None:
return result[2]
return None
def _get_uid(name):
"""Returns an uid, given a user name."""
if getpwnam is None or name is None:
return None
try:
result = getpwnam(name)
except KeyError:
result = None
if result is not None:
return result[2]
return None
def make_tarball(
base_name,
base_dir,
compress="gzip",
verbose=0,
dry_run=0,
owner=None,
group=None,
):
"""Create a tar file from all the files under 'base_dir'.
This file may be compressed.
:param compress: Compression algorithms. Supported algorithms are:
'gzip': (the default)
'compress'
'bzip2'
None
For 'gzip' and 'bzip2' the internal tarfile module will be used.
For 'compress' the .tar will be created using tarfile, and then
we will spawn 'compress' afterwards.
The output tar file will be named 'base_name' + ".tar",
possibly plus the appropriate compression extension (".gz",
".bz2" or ".Z"). Return the output filename.
"""
# XXX GNU tar 1.13 has a nifty option to add a prefix directory.
# It's pretty new, though, so we certainly can't require it --
# but it would be nice to take advantage of it to skip the
# "create a tree of hardlinks" step! (Would also be nice to
# detect GNU tar to use its 'z' option and save a step.)
compress_ext = {"gzip": ".gz", "bzip2": ".bz2", "compress": ".Z"}
    # flags for the compression program; each list element becomes an argument
tarfile_compress_flag = {"gzip": "gz", "bzip2": "bz2"}
compress_flags = {"compress": ["-f"]}
if compress is not None and compress not in compress_ext.keys():
raise ValueError(
"bad value for 'compress': must be None, 'gzip',"
"'bzip2' or 'compress'"
)
archive_name = base_name + ".tar"
if compress and compress in tarfile_compress_flag:
archive_name += compress_ext[compress]
mode = "w:" + tarfile_compress_flag.get(compress, "")
mkpath(os.path.dirname(archive_name), dry_run=dry_run)
log.info(f"Creating tar file {archive_name} with mode {mode}")
uid = _get_uid(owner)
gid = _get_gid(group)
def _set_uid_gid(tarinfo):
if gid is not None:
tarinfo.gid = gid
tarinfo.gname = group
if uid is not None:
tarinfo.uid = uid
tarinfo.uname = owner
return tarinfo
if not dry_run:
tar = tarfile.open(archive_name, mode=mode)
# This recursively adds everything underneath base_dir
try:
tar.add(base_dir, filter=_set_uid_gid)
finally:
tar.close()
if compress and compress not in tarfile_compress_flag:
spawn(
[compress] + compress_flags[compress] + [archive_name],
dry_run=dry_run,
)
return archive_name + compress_ext[compress]
else:
return archive_name
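# Usage sketch (illustrative): pack ./build/pkg into dist/release.tar.gz with
# root ownership recorded inside the archive; the final filename is returned.
#
#     make_tarball('dist/release', 'build/pkg', compress='gzip',
#                  owner='root', group='root')
#     # -> 'dist/release.tar.gz'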
_custom_formats = {
"gztar": (make_tarball, [("compress", "gzip")], "gzip'ed tar-file"),
"bztar": (make_tarball, [("compress", "bzip2")], "bzip2'ed tar-file"),
"ztar": (make_tarball, [("compress", "compress")], "compressed tar file"),
"tar": (make_tarball, [("compress", None)], "uncompressed tar file"),
}
# Hack in and insert ourselves into the distutils code base
def install_custom_make_tarball():
distutils.archive_util.ARCHIVE_FORMATS.update(_custom_formats)
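# Editor's hedged usage sketch (not part of the original file; "demo" and
# "pkgroot" are illustrative names). Once install_custom_make_tarball() has
# patched ARCHIVE_FORMATS, distutils' generic make_archive() dispatches
# "gztar"/"bztar"/"ztar"/"tar" to the make_tarball() defined above:
def _archive_demo():
    install_custom_make_tarball()
    # Writes demo.tar.gz containing everything under ./pkgroot (owner/group
    # overrides would additionally need the pwd/grp modules, i.e. a Unix host)
    return distutils.archive_util.make_archive(
        "demo", "gztar", base_dir="pkgroot", owner=None, group=None
    )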
| 4,992 | Python | .py | 133 | 32.090226 | 79 | 0.669495 | paramiko/paramiko | 9,017 | 1,999 | 1,074 | LGPL-2.1 | 9/5/2024, 5:07:36 PM (Europe/Amsterdam) |
553 | shared_conf.py | paramiko_paramiko/sites/shared_conf.py | from datetime import datetime
import alabaster
# Alabaster theme + mini-extension
html_theme_path = [alabaster.get_path()]
extensions = ["alabaster", "sphinx.ext.intersphinx"]
# Paths relative to invoking conf.py - not this shared file
html_theme = "alabaster"
html_theme_options = {
"description": "A Python implementation of SSHv2.",
"github_user": "paramiko",
"github_repo": "paramiko",
"analytics_id": "UA-18486793-2",
"travis_button": False,
"tidelift_url": "https://tidelift.com/subscription/pkg/pypi-paramiko?utm_source=pypi-paramiko&utm_medium=referral&utm_campaign=docs",
}
html_sidebars = {
"**": ["about.html", "navigation.html", "searchbox.html", "donate.html"]
}
# Everything intersphinx's to Python
intersphinx_mapping = {"python": ("https://docs.python.org/3.6/", None)}
# Regular settings
project = "Paramiko"
copyright = f"{datetime.now().year} Jeff Forcier"
master_doc = "index"
templates_path = ["_templates"]
exclude_trees = ["_build"]
source_suffix = ".rst"
default_role = "obj"
| 1,033 | Python | .py | 28 | 34.714286 | 137 | 0.721 | paramiko/paramiko | 9,017 | 1,999 | 1,074 | LGPL-2.1 | 9/5/2024, 5:07:36 PM (Europe/Amsterdam) |
554 | conf.py | paramiko_paramiko/sites/www/conf.py | # Obtain shared config values
from pathlib import Path
import os
import sys
updir = Path(__file__).parent.parent.resolve()
sys.path.append(str(updir))
from shared_conf import *
# Releases changelog extension
extensions.append("releases")
releases_release_uri = "https://github.com/paramiko/paramiko/tree/%s"
releases_issue_uri = "https://github.com/paramiko/paramiko/issues/%s"
releases_development_branch = "main"
# Don't show unreleased_X.x sections up top for 1.x or 2.x anymore
releases_supported_versions = [3]
# Default is 'local' building, but reference the public docs site when building
# under RTD.
target = updir / "docs" / "_build"
if os.environ.get("READTHEDOCS") == "True":
target = "http://docs.paramiko.org/en/latest/"
intersphinx_mapping["docs"] = (str(target), None)
# Sister-site links to API docs
html_theme_options["extra_nav_links"] = {
"API Docs": "http://docs.paramiko.org"
}
| 912 | Python | .py | 24 | 36.5 | 79 | 0.748869 | paramiko/paramiko | 9,017 | 1,999 | 1,074 | LGPL-2.1 | 9/5/2024, 5:07:36 PM (Europe/Amsterdam) |
555 | conf.py | paramiko_paramiko/sites/docs/conf.py | # Obtain shared config values
import os, sys
from os.path import abspath, join, dirname
sys.path.append(abspath(".."))
sys.path.append(abspath("../.."))
from shared_conf import *
# Enable autodoc, intersphinx
extensions.extend(["sphinx.ext.autodoc"])
# Autodoc settings
autodoc_default_options = {
"members": True,
"special-members": True,
}
# Default is 'local' building, but reference the public www site when building
# under RTD.
target = join(dirname(__file__), "..", "www", "_build")
if os.environ.get("READTHEDOCS") == "True":
target = "http://paramiko.org"
intersphinx_mapping["www"] = (target, None)
# Sister-site links to WWW
html_theme_options["extra_nav_links"] = {
"Main website": "http://www.paramiko.org"
}
| 743 | Python | .py | 23 | 30.391304 | 78 | 0.71049 | paramiko/paramiko | 9,017 | 1,999 | 1,074 | LGPL-2.1 | 9/5/2024, 5:07:36 PM (Europe/Amsterdam) |
556 | conftest.py | paramiko_paramiko/tests/conftest.py | import logging
import os
import shutil
import threading
from pathlib import Path
from invoke.vendor.lexicon import Lexicon
import pytest
from paramiko import (
SFTPServer,
SFTP,
Transport,
DSSKey,
RSAKey,
Ed25519Key,
ECDSAKey,
PKey,
)
from ._loop import LoopSocket
from ._stub_sftp import StubServer, StubSFTPServer
from ._util import _support
from icecream import ic, install as install_ic
# Better print() for debugging - use ic()!
install_ic()
ic.configureOutput(includeContext=True)
# Perform logging by default; pytest will capture and thus hide it normally,
# presenting it on error/failure. (But also allow turning it off when doing
# very pinpoint debugging - e.g. using breakpoints, so you don't want output
# hiding enabled, but also don't want all the logging to gum up the terminal.)
if not os.environ.get("DISABLE_LOGGING", False):
logging.basicConfig(
level=logging.DEBUG,
# Also make sure to set up timestamping for more sanity when debugging.
format="[%(relativeCreated)s]\t%(levelname)s:%(name)s:%(message)s",
datefmt="%H:%M:%S",
)
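# Example invocation (an illustrative assumption, not project policy):
#
#     DISABLE_LOGGING=1 pytest tests/test_sftp.py -k write --pdb
#
# keeps breakpoint-driven sessions readable by skipping the DEBUG firehose.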
def make_sftp_folder():
"""
Ensure expected target temp folder exists on the remote end.
Will clean it out if it already exists.
"""
# TODO: go back to using the sftp functionality itself for folder setup so
# we can test against live SFTP servers again someday. (Not clear if anyone
# is/was using the old capability for such, though...)
# TODO: something that would play nicer with concurrent testing (but
# probably e.g. using thread ID or UUIDs or something; not the "count up
# until you find one not used!" crap from before...)
# TODO: if we want to lock ourselves even harder into localhost-only
# testing (probably not?) could use tempdir modules for this for improved
# safety. Then again...why would someone have such a folder???
path = os.environ.get("TEST_FOLDER", "paramiko-test-target")
# Forcibly nuke this directory locally, since at the moment, the below
# fixtures only ever run with a locally scoped stub test server.
shutil.rmtree(path, ignore_errors=True)
# Then create it anew, again locally, for the same reason.
os.mkdir(path)
return path
@pytest.fixture # (scope='session')
def sftp_server():
"""
Set up an in-memory SFTP server thread. Yields the client Transport/socket.
The resulting client Transport (along with all the server components) will
be the same object throughout the test session; the `sftp` fixture then
creates new higher level client objects wrapped around the client
Transport, as necessary.
"""
# Sockets & transports
socks = LoopSocket()
sockc = LoopSocket()
sockc.link(socks)
# TODO: reuse with new server fixture if possible
tc = Transport(sockc)
ts = Transport(socks)
# Auth
host_key = RSAKey.from_private_key_file(_support("rsa.key"))
ts.add_server_key(host_key)
# Server setup
event = threading.Event()
server = StubServer()
ts.set_subsystem_handler("sftp", SFTPServer, StubSFTPServer)
ts.start_server(event, server)
# Wait (so client has time to connect? Not sure. Old.)
event.wait(1.0)
# Make & yield connection.
tc.connect(username="slowdive", password="pygmalion")
yield tc
# TODO: any need for shutdown? Why didn't old suite do so? Or was that the
# point of the "join all threads from threading module" crap in test.py?
@pytest.fixture
def sftp(sftp_server):
"""
Yield an SFTP client connected to the global in-session SFTP server thread.
"""
# Client setup
client = SFTP.from_transport(sftp_server)
# Work in 'remote' folder setup (as it wants to use the client)
# TODO: how cleanest to make this available to tests? Doing it this way is
# marginally less bad than the previous 'global'-using setup, but not by
# much?
client.FOLDER = make_sftp_folder()
# Yield client to caller
yield client
# Clean up - as in make_sftp_folder, we assume local-only exec for now.
shutil.rmtree(client.FOLDER, ignore_errors=True)
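# Illustrative consumption of the fixture (editor's sketch; real examples
# live in tests/test_sftp.py). Tests confine all their I/O to sftp.FOLDER:
#
#     def test_roundtrip(sftp):
#         with sftp.open(sftp.FOLDER + "/x.txt", "w") as f:
#             f.write("hi")
#         assert sftp.stat(sftp.FOLDER + "/x.txt").st_size == 2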
key_data = [
["ssh-rsa", RSAKey, "SHA256:OhNL391d/beeFnxxg18AwWVYTAHww+D4djEE7Co0Yng"],
["ssh-dss", DSSKey, "SHA256:uHwwykG099f4M4kfzvFpKCTino0/P03DRbAidpAmPm0"],
[
"ssh-ed25519",
Ed25519Key,
"SHA256:J6VESFdD3xSChn8y9PzWzeF+1tl892mOy2TqkMLO4ow",
],
[
"ecdsa-sha2-nistp256",
ECDSAKey,
"SHA256:BrQG04oNKUETjKCeL4ifkARASg3yxS/pUHl3wWM26Yg",
],
]
for datum in key_data:
# Add true first member with human-facing short algo name
short = datum[0].replace("ssh-", "").replace("sha2-nistp", "")
datum.insert(0, short)
@pytest.fixture(scope="session", params=key_data, ids=lambda x: x[0])
def keys(request):
"""
Yield an object for each known type of key, with attributes:
- ``short_type``: short identifier, eg ``rsa`` or ``ecdsa-256``
- ``full_type``: the "message style" key identifier, eg ``ssh-rsa``, or
``ecdsa-sha2-nistp256``.
- ``path``: a pathlib Path object to the fixture key file
- ``pkey``: PKey object, which may or may not also have a cert loaded
- ``expected_fp``: the expected fingerprint of said key
"""
short_type, key_type, key_class, fingerprint = request.param
bag = Lexicon()
bag.short_type = short_type
bag.full_type = key_type
bag.path = Path(_support(f"{short_type}.key"))
with bag.path.open() as fd:
bag.pkey = key_class.from_private_key(fd)
# Second copy for things like equality-but-not-identity testing
with bag.path.open() as fd:
bag.pkey2 = key_class.from_private_key(fd)
bag.expected_fp = fingerprint
# Also tack on the cert-bearing variant for some tests
cert = bag.path.with_suffix(".key-cert.pub")
bag.pkey_with_cert = PKey.from_path(cert) if cert.exists() else None
# Safety checks
assert bag.pkey.fingerprint == fingerprint
yield bag
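# Editor's sketch of a consumer (a hypothetical test, run once per key_data
# entry thanks to the params above):
#
#     def test_fingerprint_matches(keys):
#         assert keys.pkey.fingerprint == keys.expected_fp
#         # pkey2 is an equal-but-distinct copy loaded from the same file
#         assert keys.pkey is not keys.pkey2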
| 6,027 | Python | .py | 149 | 35.805369 | 79 | 0.702578 | paramiko/paramiko | 9,017 | 1,999 | 1,074 | LGPL-2.1 | 9/5/2024, 5:07:36 PM (Europe/Amsterdam) |
557 | test_message.py | paramiko_paramiko/tests/test_message.py | # Copyright (C) 2003-2009 Robey Pointer <[email protected]>
#
# This file is part of paramiko.
#
# Paramiko is free software; you can redistribute it and/or modify it under the
# terms of the GNU Lesser General Public License as published by the Free
# Software Foundation; either version 2.1 of the License, or (at your option)
# any later version.
#
# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Paramiko; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
"""
Some unit tests for SSH protocol message blocks.
"""
import unittest
from paramiko.message import Message
from paramiko.common import byte_chr, zero_byte
class MessageTest(unittest.TestCase):
__a = (
b"\x00\x00\x00\x17\x07\x60\xe0\x90\x00\x00\x00\x01\x71\x00\x00\x00\x05\x68\x65\x6c\x6c\x6f\x00\x00\x03\xe8" # noqa
+ b"x" * 1000
)
__b = b"\x01\x00\xf3\x00\x3f\x00\x00\x00\x10\x68\x75\x65\x79\x2c\x64\x65\x77\x65\x79\x2c\x6c\x6f\x75\x69\x65" # noqa
__c = b"\x00\x00\x00\x00\x00\x00\x00\x05\x00\x00\xf5\xe4\xd3\xc2\xb1\x09\x00\x00\x00\x01\x11\x00\x00\x00\x07\x00\xf5\xe4\xd3\xc2\xb1\x09\x00\x00\x00\x06\x9a\x1b\x2c\x3d\x4e\xf7" # noqa
__d = b"\x00\x00\x00\x05\xff\x00\x00\x00\x05\x11\x22\x33\x44\x55\xff\x00\x00\x00\x0a\x00\xf0\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x03\x63\x61\x74\x00\x00\x00\x03\x61\x2c\x62" # noqa
def test_encode(self):
msg = Message()
msg.add_int(23)
msg.add_int(123789456)
msg.add_string("q")
msg.add_string("hello")
msg.add_string("x" * 1000)
self.assertEqual(msg.asbytes(), self.__a)
msg = Message()
msg.add_boolean(True)
msg.add_boolean(False)
msg.add_byte(byte_chr(0xF3))
msg.add_bytes(zero_byte + byte_chr(0x3F))
msg.add_list(["huey", "dewey", "louie"])
self.assertEqual(msg.asbytes(), self.__b)
msg = Message()
msg.add_int64(5)
msg.add_int64(0xF5E4D3C2B109)
msg.add_mpint(17)
msg.add_mpint(0xF5E4D3C2B109)
msg.add_mpint(-0x65E4D3C2B109)
self.assertEqual(msg.asbytes(), self.__c)
def test_decode(self):
msg = Message(self.__a)
self.assertEqual(msg.get_int(), 23)
self.assertEqual(msg.get_int(), 123789456)
self.assertEqual(msg.get_text(), "q")
self.assertEqual(msg.get_text(), "hello")
self.assertEqual(msg.get_text(), "x" * 1000)
msg = Message(self.__b)
self.assertEqual(msg.get_boolean(), True)
self.assertEqual(msg.get_boolean(), False)
self.assertEqual(msg.get_byte(), byte_chr(0xF3))
self.assertEqual(msg.get_bytes(2), zero_byte + byte_chr(0x3F))
self.assertEqual(msg.get_list(), ["huey", "dewey", "louie"])
msg = Message(self.__c)
self.assertEqual(msg.get_int64(), 5)
self.assertEqual(msg.get_int64(), 0xF5E4D3C2B109)
self.assertEqual(msg.get_mpint(), 17)
self.assertEqual(msg.get_mpint(), 0xF5E4D3C2B109)
self.assertEqual(msg.get_mpint(), -0x65E4D3C2B109)
def test_add(self):
msg = Message()
msg.add(5)
msg.add(0x1122334455)
msg.add(0xF00000000000000000)
msg.add(True)
msg.add("cat")
msg.add(["a", "b"])
self.assertEqual(msg.asbytes(), self.__d)
def test_misc(self):
msg = Message(self.__d)
self.assertEqual(msg.get_adaptive_int(), 5)
self.assertEqual(msg.get_adaptive_int(), 0x1122334455)
self.assertEqual(msg.get_adaptive_int(), 0xF00000000000000000)
self.assertEqual(msg.get_so_far(), self.__d[:29])
self.assertEqual(msg.get_remainder(), self.__d[29:])
msg.rewind()
self.assertEqual(msg.get_adaptive_int(), 5)
self.assertEqual(msg.get_so_far(), self.__d[:4])
self.assertEqual(msg.get_remainder(), self.__d[4:])
def test_bytes_str_and_repr(self):
msg = Message(self.__d)
assert str(msg) == f"paramiko.Message({self.__d!r})"
assert repr(msg) == str(msg)
assert bytes(msg) == msg.asbytes() == self.__d
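# Editor's sketch of the encode/decode symmetry exercised above: every add_*
# call is mirrored by a get_* call in the same order. Not a collected test.
def _roundtrip_demo():
    m = Message()
    m.add_int(23)
    m.add_string("hello")
    decoded = Message(m.asbytes())
    return decoded.get_int() == 23 and decoded.get_text() == "hello"  # True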
| 4,432 | Python | .py | 97 | 38.917526 | 197 | 0.650614 | paramiko/paramiko | 9,017 | 1,999 | 1,074 | LGPL-2.1 | 9/5/2024, 5:07:36 PM (Europe/Amsterdam) |
558 | test_kex.py | paramiko_paramiko/tests/test_kex.py | # Copyright (C) 2003-2009 Robey Pointer <[email protected]>
#
# This file is part of paramiko.
#
# Paramiko is free software; you can redistribute it and/or modify it under the
# terms of the GNU Lesser General Public License as published by the Free
# Software Foundation; either version 2.1 of the License, or (at your option)
# any later version.
#
# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Paramiko; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
"""
Some unit tests for the key exchange protocols.
"""
from binascii import hexlify, unhexlify
import os
import unittest
from unittest.mock import Mock, patch
import pytest
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives.asymmetric import ec
try:
from cryptography.hazmat.primitives.asymmetric import x25519
except ImportError:
x25519 = None
import paramiko.util
from paramiko.kex_group1 import KexGroup1
from paramiko.kex_group14 import KexGroup14SHA256
from paramiko.kex_gex import KexGex, KexGexSHA256
from paramiko import Message
from paramiko.common import byte_chr
from paramiko.kex_ecdh_nist import KexNistp256
from paramiko.kex_group16 import KexGroup16SHA512
from paramiko.kex_curve25519 import KexCurve25519
def dummy_urandom(n):
return byte_chr(0xCC) * n
def dummy_generate_key_pair(obj):
private_key_value = 94761803665136558137557783047955027733968423115106677159790289642479432803037 # noqa
public_key_numbers = "042bdab212fa8ba1b7c843301682a4db424d307246c7e1e6083c41d9ca7b098bf30b3d63e2ec6278488c135360456cc054b3444ecc45998c08894cbc1370f5f989" # noqa
public_key_numbers_obj = ec.EllipticCurvePublicKey.from_encoded_point(
ec.SECP256R1(), unhexlify(public_key_numbers)
).public_numbers()
obj.P = ec.EllipticCurvePrivateNumbers(
private_value=private_key_value, public_numbers=public_key_numbers_obj
).private_key(default_backend())
if obj.transport.server_mode:
obj.Q_S = ec.EllipticCurvePublicKey.from_encoded_point(
ec.SECP256R1(), unhexlify(public_key_numbers)
)
return
obj.Q_C = ec.EllipticCurvePublicKey.from_encoded_point(
ec.SECP256R1(), unhexlify(public_key_numbers)
)
class FakeKey:
def __str__(self):
return "fake-key"
def asbytes(self):
return b"fake-key"
def sign_ssh_data(self, H, algorithm):
return b"fake-sig"
class FakeModulusPack:
P = 0xFFFFFFFFFFFFFFFFC90FDAA22168C234C4C6628B80DC1CD129024E088A67CC74020BBEA63B139B22514A08798E3404DDEF9519B3CD3A431B302B0A6DF25F14374FE1356D6D51C245E485B576625E7EC6F44C42E9A637ED6B0BFF5CB6F406B7EDEE386BFB5A899FA5AE9F24117C4B1FE649286651ECE65381FFFFFFFFFFFFFFFF # noqa
G = 2
def get_modulus(self, min, ask, max):
return self.G, self.P
class FakeTransport:
local_version = "SSH-2.0-paramiko_1.0"
remote_version = "SSH-2.0-lame"
local_kex_init = "local-kex-init"
remote_kex_init = "remote-kex-init"
host_key_type = "fake-key"
def _send_message(self, m):
self._message = m
def _expect_packet(self, *t):
self._expect = t
def _set_K_H(self, K, H):
self._K = K
self._H = H
def _verify_key(self, host_key, sig):
self._verify = (host_key, sig)
def _activate_outbound(self):
self._activated = True
def _log(self, level, s):
pass
def get_server_key(self):
return FakeKey()
def _get_modulus_pack(self):
return FakeModulusPack()
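# How the fakes are consumed (editor's note): a kex object drives its
# transport, and FakeTransport records every side effect (_message, _expect,
# _K/_H, _verify, _activated) so the tests below can assert on exact bytes,
# reproducible thanks to the urandom/keypair stubs installed in setUp. Sketch:
#
#     transport = FakeTransport()
#     transport.server_mode = False
#     KexGroup1(transport).start_kex()
#     transport._message.asbytes()  # the outgoing KEXDH_INIT payload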
class KexTest(unittest.TestCase):
K = 14730343317708716439807310032871972459448364195094179797249681733965528989482751523943515690110179031004049109375612685505881911274101441415545039654102474376472240501616988799699744135291070488314748284283496055223852115360852283821334858541043710301057312858051901453919067023103730011648890038847384890504 # noqa
def setUp(self):
self._original_urandom = os.urandom
os.urandom = dummy_urandom
self._original_generate_key_pair = KexNistp256._generate_key_pair
KexNistp256._generate_key_pair = dummy_generate_key_pair
if KexCurve25519.is_available():
static_x25519_key = x25519.X25519PrivateKey.from_private_bytes(
unhexlify(
b"2184abc7eb3e656d2349d2470ee695b570c227340c2b2863b6c9ff427af1f040" # noqa
)
)
mock_x25519 = Mock()
mock_x25519.generate.return_value = static_x25519_key
patcher = patch(
"paramiko.kex_curve25519.X25519PrivateKey", mock_x25519
)
patcher.start()
self.x25519_patcher = patcher
def tearDown(self):
os.urandom = self._original_urandom
KexNistp256._generate_key_pair = self._original_generate_key_pair
if hasattr(self, "x25519_patcher"):
self.x25519_patcher.stop()
def test_group1_client(self):
transport = FakeTransport()
transport.server_mode = False
kex = KexGroup1(transport)
kex.start_kex()
x = b"1E000000807E2DDB1743F3487D6545F04F1C8476092FB912B013626AB5BCEB764257D88BBA64243B9F348DF7B41B8C814A995E00299913503456983FFB9178D3CD79EB6D55522418A8ABF65375872E55938AB99A84A0B5FC8A1ECC66A7C3766E7E0F80B7CE2C9225FC2DD683F4764244B72963BBB383F529DCF0C5D17740B8A2ADBE9208D4" # noqa
self.assertEqual(x, hexlify(transport._message.asbytes()).upper())
self.assertEqual(
(paramiko.kex_group1._MSG_KEXDH_REPLY,), transport._expect
)
# fake "reply"
msg = Message()
msg.add_string("fake-host-key")
msg.add_mpint(69)
msg.add_string("fake-sig")
msg.rewind()
kex.parse_next(paramiko.kex_group1._MSG_KEXDH_REPLY, msg)
H = b"03079780F3D3AD0B3C6DB30C8D21685F367A86D2"
self.assertEqual(self.K, transport._K)
self.assertEqual(H, hexlify(transport._H).upper())
self.assertEqual((b"fake-host-key", b"fake-sig"), transport._verify)
self.assertTrue(transport._activated)
def test_group1_server(self):
transport = FakeTransport()
transport.server_mode = True
kex = KexGroup1(transport)
kex.start_kex()
self.assertEqual(
(paramiko.kex_group1._MSG_KEXDH_INIT,), transport._expect
)
msg = Message()
msg.add_mpint(69)
msg.rewind()
kex.parse_next(paramiko.kex_group1._MSG_KEXDH_INIT, msg)
H = b"B16BF34DD10945EDE84E9C1EF24A14BFDC843389"
x = b"1F0000000866616B652D6B6579000000807E2DDB1743F3487D6545F04F1C8476092FB912B013626AB5BCEB764257D88BBA64243B9F348DF7B41B8C814A995E00299913503456983FFB9178D3CD79EB6D55522418A8ABF65375872E55938AB99A84A0B5FC8A1ECC66A7C3766E7E0F80B7CE2C9225FC2DD683F4764244B72963BBB383F529DCF0C5D17740B8A2ADBE9208D40000000866616B652D736967" # noqa
self.assertEqual(self.K, transport._K)
self.assertEqual(H, hexlify(transport._H).upper())
self.assertEqual(x, hexlify(transport._message.asbytes()).upper())
self.assertTrue(transport._activated)
def test_gex_client(self):
transport = FakeTransport()
transport.server_mode = False
kex = KexGex(transport)
kex.start_kex()
x = b"22000004000000080000002000"
self.assertEqual(x, hexlify(transport._message.asbytes()).upper())
self.assertEqual(
(paramiko.kex_gex._MSG_KEXDH_GEX_GROUP,), transport._expect
)
msg = Message()
msg.add_mpint(FakeModulusPack.P)
msg.add_mpint(FakeModulusPack.G)
msg.rewind()
kex.parse_next(paramiko.kex_gex._MSG_KEXDH_GEX_GROUP, msg)
x = b"20000000807E2DDB1743F3487D6545F04F1C8476092FB912B013626AB5BCEB764257D88BBA64243B9F348DF7B41B8C814A995E00299913503456983FFB9178D3CD79EB6D55522418A8ABF65375872E55938AB99A84A0B5FC8A1ECC66A7C3766E7E0F80B7CE2C9225FC2DD683F4764244B72963BBB383F529DCF0C5D17740B8A2ADBE9208D4" # noqa
self.assertEqual(x, hexlify(transport._message.asbytes()).upper())
self.assertEqual(
(paramiko.kex_gex._MSG_KEXDH_GEX_REPLY,), transport._expect
)
msg = Message()
msg.add_string("fake-host-key")
msg.add_mpint(69)
msg.add_string("fake-sig")
msg.rewind()
kex.parse_next(paramiko.kex_gex._MSG_KEXDH_GEX_REPLY, msg)
H = b"A265563F2FA87F1A89BF007EE90D58BE2E4A4BD0"
self.assertEqual(self.K, transport._K)
self.assertEqual(H, hexlify(transport._H).upper())
self.assertEqual((b"fake-host-key", b"fake-sig"), transport._verify)
self.assertTrue(transport._activated)
def test_gex_old_client(self):
transport = FakeTransport()
transport.server_mode = False
kex = KexGex(transport)
kex.start_kex(_test_old_style=True)
x = b"1E00000800"
self.assertEqual(x, hexlify(transport._message.asbytes()).upper())
self.assertEqual(
(paramiko.kex_gex._MSG_KEXDH_GEX_GROUP,), transport._expect
)
msg = Message()
msg.add_mpint(FakeModulusPack.P)
msg.add_mpint(FakeModulusPack.G)
msg.rewind()
kex.parse_next(paramiko.kex_gex._MSG_KEXDH_GEX_GROUP, msg)
x = b"20000000807E2DDB1743F3487D6545F04F1C8476092FB912B013626AB5BCEB764257D88BBA64243B9F348DF7B41B8C814A995E00299913503456983FFB9178D3CD79EB6D55522418A8ABF65375872E55938AB99A84A0B5FC8A1ECC66A7C3766E7E0F80B7CE2C9225FC2DD683F4764244B72963BBB383F529DCF0C5D17740B8A2ADBE9208D4" # noqa
self.assertEqual(x, hexlify(transport._message.asbytes()).upper())
self.assertEqual(
(paramiko.kex_gex._MSG_KEXDH_GEX_REPLY,), transport._expect
)
msg = Message()
msg.add_string("fake-host-key")
msg.add_mpint(69)
msg.add_string("fake-sig")
msg.rewind()
kex.parse_next(paramiko.kex_gex._MSG_KEXDH_GEX_REPLY, msg)
H = b"807F87B269EF7AC5EC7E75676808776A27D5864C"
self.assertEqual(self.K, transport._K)
self.assertEqual(H, hexlify(transport._H).upper())
self.assertEqual((b"fake-host-key", b"fake-sig"), transport._verify)
self.assertTrue(transport._activated)
def test_gex_server(self):
transport = FakeTransport()
transport.server_mode = True
kex = KexGex(transport)
kex.start_kex()
self.assertEqual(
(
paramiko.kex_gex._MSG_KEXDH_GEX_REQUEST,
paramiko.kex_gex._MSG_KEXDH_GEX_REQUEST_OLD,
),
transport._expect,
)
msg = Message()
msg.add_int(1024)
msg.add_int(2048)
msg.add_int(4096)
msg.rewind()
kex.parse_next(paramiko.kex_gex._MSG_KEXDH_GEX_REQUEST, msg)
x = b"1F0000008100FFFFFFFFFFFFFFFFC90FDAA22168C234C4C6628B80DC1CD129024E088A67CC74020BBEA63B139B22514A08798E3404DDEF9519B3CD3A431B302B0A6DF25F14374FE1356D6D51C245E485B576625E7EC6F44C42E9A637ED6B0BFF5CB6F406B7EDEE386BFB5A899FA5AE9F24117C4B1FE649286651ECE65381FFFFFFFFFFFFFFFF0000000102" # noqa
self.assertEqual(x, hexlify(transport._message.asbytes()).upper())
self.assertEqual(
(paramiko.kex_gex._MSG_KEXDH_GEX_INIT,), transport._expect
)
msg = Message()
msg.add_mpint(12345)
msg.rewind()
kex.parse_next(paramiko.kex_gex._MSG_KEXDH_GEX_INIT, msg)
K = 67592995013596137876033460028393339951879041140378510871612128162185209509220726296697886624612526735888348020498716482757677848959420073720160491114319163078862905400020959196386947926388406687288901564192071077389283980347784184487280885335302632305026248574716290537036069329724382811853044654824945750581 # noqa
H = b"CE754197C21BF3452863B4F44D0B3951F12516EF"
x = b"210000000866616B652D6B6579000000807E2DDB1743F3487D6545F04F1C8476092FB912B013626AB5BCEB764257D88BBA64243B9F348DF7B41B8C814A995E00299913503456983FFB9178D3CD79EB6D55522418A8ABF65375872E55938AB99A84A0B5FC8A1ECC66A7C3766E7E0F80B7CE2C9225FC2DD683F4764244B72963BBB383F529DCF0C5D17740B8A2ADBE9208D40000000866616B652D736967" # noqa
self.assertEqual(K, transport._K)
self.assertEqual(H, hexlify(transport._H).upper())
self.assertEqual(x, hexlify(transport._message.asbytes()).upper())
self.assertTrue(transport._activated)
def test_gex_server_with_old_client(self):
transport = FakeTransport()
transport.server_mode = True
kex = KexGex(transport)
kex.start_kex()
self.assertEqual(
(
paramiko.kex_gex._MSG_KEXDH_GEX_REQUEST,
paramiko.kex_gex._MSG_KEXDH_GEX_REQUEST_OLD,
),
transport._expect,
)
msg = Message()
msg.add_int(2048)
msg.rewind()
kex.parse_next(paramiko.kex_gex._MSG_KEXDH_GEX_REQUEST_OLD, msg)
x = b"1F0000008100FFFFFFFFFFFFFFFFC90FDAA22168C234C4C6628B80DC1CD129024E088A67CC74020BBEA63B139B22514A08798E3404DDEF9519B3CD3A431B302B0A6DF25F14374FE1356D6D51C245E485B576625E7EC6F44C42E9A637ED6B0BFF5CB6F406B7EDEE386BFB5A899FA5AE9F24117C4B1FE649286651ECE65381FFFFFFFFFFFFFFFF0000000102" # noqa
self.assertEqual(x, hexlify(transport._message.asbytes()).upper())
self.assertEqual(
(paramiko.kex_gex._MSG_KEXDH_GEX_INIT,), transport._expect
)
msg = Message()
msg.add_mpint(12345)
msg.rewind()
kex.parse_next(paramiko.kex_gex._MSG_KEXDH_GEX_INIT, msg)
K = 67592995013596137876033460028393339951879041140378510871612128162185209509220726296697886624612526735888348020498716482757677848959420073720160491114319163078862905400020959196386947926388406687288901564192071077389283980347784184487280885335302632305026248574716290537036069329724382811853044654824945750581 # noqa
H = b"B41A06B2E59043CEFC1AE16EC31F1E2D12EC455B"
x = b"210000000866616B652D6B6579000000807E2DDB1743F3487D6545F04F1C8476092FB912B013626AB5BCEB764257D88BBA64243B9F348DF7B41B8C814A995E00299913503456983FFB9178D3CD79EB6D55522418A8ABF65375872E55938AB99A84A0B5FC8A1ECC66A7C3766E7E0F80B7CE2C9225FC2DD683F4764244B72963BBB383F529DCF0C5D17740B8A2ADBE9208D40000000866616B652D736967" # noqa
self.assertEqual(K, transport._K)
self.assertEqual(H, hexlify(transport._H).upper())
self.assertEqual(x, hexlify(transport._message.asbytes()).upper())
self.assertTrue(transport._activated)
def test_gex_sha256_client(self):
transport = FakeTransport()
transport.server_mode = False
kex = KexGexSHA256(transport)
kex.start_kex()
x = b"22000004000000080000002000"
self.assertEqual(x, hexlify(transport._message.asbytes()).upper())
self.assertEqual(
(paramiko.kex_gex._MSG_KEXDH_GEX_GROUP,), transport._expect
)
msg = Message()
msg.add_mpint(FakeModulusPack.P)
msg.add_mpint(FakeModulusPack.G)
msg.rewind()
kex.parse_next(paramiko.kex_gex._MSG_KEXDH_GEX_GROUP, msg)
x = b"20000000807E2DDB1743F3487D6545F04F1C8476092FB912B013626AB5BCEB764257D88BBA64243B9F348DF7B41B8C814A995E00299913503456983FFB9178D3CD79EB6D55522418A8ABF65375872E55938AB99A84A0B5FC8A1ECC66A7C3766E7E0F80B7CE2C9225FC2DD683F4764244B72963BBB383F529DCF0C5D17740B8A2ADBE9208D4" # noqa
self.assertEqual(x, hexlify(transport._message.asbytes()).upper())
self.assertEqual(
(paramiko.kex_gex._MSG_KEXDH_GEX_REPLY,), transport._expect
)
msg = Message()
msg.add_string("fake-host-key")
msg.add_mpint(69)
msg.add_string("fake-sig")
msg.rewind()
kex.parse_next(paramiko.kex_gex._MSG_KEXDH_GEX_REPLY, msg)
H = b"AD1A9365A67B4496F05594AD1BF656E3CDA0851289A4C1AFF549FEAE50896DF4"
self.assertEqual(self.K, transport._K)
self.assertEqual(H, hexlify(transport._H).upper())
self.assertEqual((b"fake-host-key", b"fake-sig"), transport._verify)
self.assertTrue(transport._activated)
def test_gex_sha256_old_client(self):
transport = FakeTransport()
transport.server_mode = False
kex = KexGexSHA256(transport)
kex.start_kex(_test_old_style=True)
x = b"1E00000800"
self.assertEqual(x, hexlify(transport._message.asbytes()).upper())
self.assertEqual(
(paramiko.kex_gex._MSG_KEXDH_GEX_GROUP,), transport._expect
)
msg = Message()
msg.add_mpint(FakeModulusPack.P)
msg.add_mpint(FakeModulusPack.G)
msg.rewind()
kex.parse_next(paramiko.kex_gex._MSG_KEXDH_GEX_GROUP, msg)
x = b"20000000807E2DDB1743F3487D6545F04F1C8476092FB912B013626AB5BCEB764257D88BBA64243B9F348DF7B41B8C814A995E00299913503456983FFB9178D3CD79EB6D55522418A8ABF65375872E55938AB99A84A0B5FC8A1ECC66A7C3766E7E0F80B7CE2C9225FC2DD683F4764244B72963BBB383F529DCF0C5D17740B8A2ADBE9208D4" # noqa
self.assertEqual(x, hexlify(transport._message.asbytes()).upper())
self.assertEqual(
(paramiko.kex_gex._MSG_KEXDH_GEX_REPLY,), transport._expect
)
msg = Message()
msg.add_string("fake-host-key")
msg.add_mpint(69)
msg.add_string("fake-sig")
msg.rewind()
kex.parse_next(paramiko.kex_gex._MSG_KEXDH_GEX_REPLY, msg)
H = b"518386608B15891AE5237DEE08DCADDE76A0BCEFCE7F6DB3AD66BC41D256DFE5"
self.assertEqual(self.K, transport._K)
self.assertEqual(H, hexlify(transport._H).upper())
self.assertEqual((b"fake-host-key", b"fake-sig"), transport._verify)
self.assertTrue(transport._activated)
def test_gex_sha256_server(self):
transport = FakeTransport()
transport.server_mode = True
kex = KexGexSHA256(transport)
kex.start_kex()
self.assertEqual(
(
paramiko.kex_gex._MSG_KEXDH_GEX_REQUEST,
paramiko.kex_gex._MSG_KEXDH_GEX_REQUEST_OLD,
),
transport._expect,
)
msg = Message()
msg.add_int(1024)
msg.add_int(2048)
msg.add_int(4096)
msg.rewind()
kex.parse_next(paramiko.kex_gex._MSG_KEXDH_GEX_REQUEST, msg)
x = b"1F0000008100FFFFFFFFFFFFFFFFC90FDAA22168C234C4C6628B80DC1CD129024E088A67CC74020BBEA63B139B22514A08798E3404DDEF9519B3CD3A431B302B0A6DF25F14374FE1356D6D51C245E485B576625E7EC6F44C42E9A637ED6B0BFF5CB6F406B7EDEE386BFB5A899FA5AE9F24117C4B1FE649286651ECE65381FFFFFFFFFFFFFFFF0000000102" # noqa
self.assertEqual(x, hexlify(transport._message.asbytes()).upper())
self.assertEqual(
(paramiko.kex_gex._MSG_KEXDH_GEX_INIT,), transport._expect
)
msg = Message()
msg.add_mpint(12345)
msg.rewind()
kex.parse_next(paramiko.kex_gex._MSG_KEXDH_GEX_INIT, msg)
K = 67592995013596137876033460028393339951879041140378510871612128162185209509220726296697886624612526735888348020498716482757677848959420073720160491114319163078862905400020959196386947926388406687288901564192071077389283980347784184487280885335302632305026248574716290537036069329724382811853044654824945750581 # noqa
H = b"CCAC0497CF0ABA1DBF55E1A3995D17F4CC31824B0E8D95CDF8A06F169D050D80"
x = b"210000000866616B652D6B6579000000807E2DDB1743F3487D6545F04F1C8476092FB912B013626AB5BCEB764257D88BBA64243B9F348DF7B41B8C814A995E00299913503456983FFB9178D3CD79EB6D55522418A8ABF65375872E55938AB99A84A0B5FC8A1ECC66A7C3766E7E0F80B7CE2C9225FC2DD683F4764244B72963BBB383F529DCF0C5D17740B8A2ADBE9208D40000000866616B652D736967" # noqa
self.assertEqual(K, transport._K)
self.assertEqual(H, hexlify(transport._H).upper())
self.assertEqual(x, hexlify(transport._message.asbytes()).upper())
self.assertTrue(transport._activated)
def test_gex_sha256_server_with_old_client(self):
transport = FakeTransport()
transport.server_mode = True
kex = KexGexSHA256(transport)
kex.start_kex()
self.assertEqual(
(
paramiko.kex_gex._MSG_KEXDH_GEX_REQUEST,
paramiko.kex_gex._MSG_KEXDH_GEX_REQUEST_OLD,
),
transport._expect,
)
msg = Message()
msg.add_int(2048)
msg.rewind()
kex.parse_next(paramiko.kex_gex._MSG_KEXDH_GEX_REQUEST_OLD, msg)
x = b"1F0000008100FFFFFFFFFFFFFFFFC90FDAA22168C234C4C6628B80DC1CD129024E088A67CC74020BBEA63B139B22514A08798E3404DDEF9519B3CD3A431B302B0A6DF25F14374FE1356D6D51C245E485B576625E7EC6F44C42E9A637ED6B0BFF5CB6F406B7EDEE386BFB5A899FA5AE9F24117C4B1FE649286651ECE65381FFFFFFFFFFFFFFFF0000000102" # noqa
self.assertEqual(x, hexlify(transport._message.asbytes()).upper())
self.assertEqual(
(paramiko.kex_gex._MSG_KEXDH_GEX_INIT,), transport._expect
)
msg = Message()
msg.add_mpint(12345)
msg.rewind()
kex.parse_next(paramiko.kex_gex._MSG_KEXDH_GEX_INIT, msg)
K = 67592995013596137876033460028393339951879041140378510871612128162185209509220726296697886624612526735888348020498716482757677848959420073720160491114319163078862905400020959196386947926388406687288901564192071077389283980347784184487280885335302632305026248574716290537036069329724382811853044654824945750581 # noqa
H = b"3DDD2AD840AD095E397BA4D0573972DC60F6461FD38A187CACA6615A5BC8ADBB"
x = b"210000000866616B652D6B6579000000807E2DDB1743F3487D6545F04F1C8476092FB912B013626AB5BCEB764257D88BBA64243B9F348DF7B41B8C814A995E00299913503456983FFB9178D3CD79EB6D55522418A8ABF65375872E55938AB99A84A0B5FC8A1ECC66A7C3766E7E0F80B7CE2C9225FC2DD683F4764244B72963BBB383F529DCF0C5D17740B8A2ADBE9208D40000000866616B652D736967" # noqa
self.assertEqual(K, transport._K)
self.assertEqual(H, hexlify(transport._H).upper())
self.assertEqual(x, hexlify(transport._message.asbytes()).upper())
self.assertTrue(transport._activated)
def test_kex_nistp256_client(self):
K = 91610929826364598472338906427792435253694642563583721654249504912114314269754 # noqa
transport = FakeTransport()
transport.server_mode = False
kex = KexNistp256(transport)
kex.start_kex()
self.assertEqual(
(paramiko.kex_ecdh_nist._MSG_KEXECDH_REPLY,), transport._expect
)
# fake reply
msg = Message()
msg.add_string("fake-host-key")
Q_S = unhexlify(
"043ae159594ba062efa121480e9ef136203fa9ec6b6e1f8723a321c16e62b945f573f3b822258cbcd094b9fa1c125cbfe5f043280893e66863cc0cb4dccbe70210" # noqa
)
msg.add_string(Q_S)
msg.add_string("fake-sig")
msg.rewind()
kex.parse_next(paramiko.kex_ecdh_nist._MSG_KEXECDH_REPLY, msg)
H = b"BAF7CE243A836037EB5D2221420F35C02B9AB6C957FE3BDE3369307B9612570A"
self.assertEqual(K, kex.transport._K)
self.assertEqual(H, hexlify(transport._H).upper())
self.assertEqual((b"fake-host-key", b"fake-sig"), transport._verify)
self.assertTrue(transport._activated)
def test_kex_nistp256_server(self):
K = 91610929826364598472338906427792435253694642563583721654249504912114314269754 # noqa
transport = FakeTransport()
transport.server_mode = True
kex = KexNistp256(transport)
kex.start_kex()
self.assertEqual(
(paramiko.kex_ecdh_nist._MSG_KEXECDH_INIT,), transport._expect
)
# fake init
msg = Message()
Q_C = unhexlify(
"043ae159594ba062efa121480e9ef136203fa9ec6b6e1f8723a321c16e62b945f573f3b822258cbcd094b9fa1c125cbfe5f043280893e66863cc0cb4dccbe70210" # noqa
)
H = b"2EF4957AFD530DD3F05DBEABF68D724FACC060974DA9704F2AEE4C3DE861E7CA"
msg.add_string(Q_C)
msg.rewind()
kex.parse_next(paramiko.kex_ecdh_nist._MSG_KEXECDH_INIT, msg)
self.assertEqual(K, transport._K)
self.assertTrue(transport._activated)
self.assertEqual(H, hexlify(transport._H).upper())
def test_kex_group14_sha256_client(self):
transport = FakeTransport()
transport.server_mode = False
kex = KexGroup14SHA256(transport)
kex.start_kex()
x = b"1E00000101009850B3A8DE3ECCD3F19644139137C93D9C11BC28ED8BE850908EE294E1D43B88B9295311EFAEF5B736A1B652EBE184CCF36CFB0681C1ED66430088FA448B83619F928E7B9592ED6160EC11D639D51C303603F930F743C646B1B67DA38A1D44598DCE6C3F3019422B898044141420E9A10C29B9C58668F7F20A40F154B2C4768FCF7A9AA7179FB6366A7167EE26DD58963E8B880A0572F641DE0A73DC74C930F7C3A0C9388553F3F8403E40CF8B95FEDB1D366596FCF3FDDEB21A0005ADA650EF1733628D807BE5ACB83925462765D9076570056E39994FB328E3108FE406275758D6BF5F32790EF15D8416BF5548164859E785DB45E7787BB0E727ADE08641ED" # noqa
self.assertEqual(x, hexlify(transport._message.asbytes()).upper())
self.assertEqual(
(paramiko.kex_group1._MSG_KEXDH_REPLY,), transport._expect
)
# fake "reply"
msg = Message()
msg.add_string("fake-host-key")
msg.add_mpint(69)
msg.add_string("fake-sig")
msg.rewind()
kex.parse_next(paramiko.kex_group1._MSG_KEXDH_REPLY, msg)
K = 21526936926159575624241589599003964979640840086252478029709904308461709651400109485351462666820496096345766733042945918306284902585618061272525323382142547359684512114160415969631877620660064043178086464811345023251493620331559440565662862858765724251890489795332144543057725932216208403143759943169004775947331771556537814494448612329251887435553890674764339328444948425882382475260315505741818518926349729970262019325118040559191290279100613049085709127598666890434114956464502529053036826173452792849566280474995114751780998069614898221773345705289637708545219204637224261997310181473787577166103031529148842107599 # noqa
H = b"D007C23686BE8A7737F828DC9E899F8EB5AF423F495F138437BE2529C1B8455F"
self.assertEqual(K, transport._K)
self.assertEqual(H, hexlify(transport._H).upper())
self.assertEqual((b"fake-host-key", b"fake-sig"), transport._verify)
self.assertTrue(transport._activated)
def test_kex_group14_sha256_server(self):
transport = FakeTransport()
transport.server_mode = True
kex = KexGroup14SHA256(transport)
kex.start_kex()
self.assertEqual(
(paramiko.kex_group1._MSG_KEXDH_INIT,), transport._expect
)
msg = Message()
msg.add_mpint(69)
msg.rewind()
kex.parse_next(paramiko.kex_group1._MSG_KEXDH_INIT, msg)
K = 21526936926159575624241589599003964979640840086252478029709904308461709651400109485351462666820496096345766733042945918306284902585618061272525323382142547359684512114160415969631877620660064043178086464811345023251493620331559440565662862858765724251890489795332144543057725932216208403143759943169004775947331771556537814494448612329251887435553890674764339328444948425882382475260315505741818518926349729970262019325118040559191290279100613049085709127598666890434114956464502529053036826173452792849566280474995114751780998069614898221773345705289637708545219204637224261997310181473787577166103031529148842107599 # noqa
H = b"15080A19894D489ACD0DA724480E1B08E71293E07EBC25FAD10F263C00B343DC"
x = b"1F0000000866616B652D6B657900000101009850B3A8DE3ECCD3F19644139137C93D9C11BC28ED8BE850908EE294E1D43B88B9295311EFAEF5B736A1B652EBE184CCF36CFB0681C1ED66430088FA448B83619F928E7B9592ED6160EC11D639D51C303603F930F743C646B1B67DA38A1D44598DCE6C3F3019422B898044141420E9A10C29B9C58668F7F20A40F154B2C4768FCF7A9AA7179FB6366A7167EE26DD58963E8B880A0572F641DE0A73DC74C930F7C3A0C9388553F3F8403E40CF8B95FEDB1D366596FCF3FDDEB21A0005ADA650EF1733628D807BE5ACB83925462765D9076570056E39994FB328E3108FE406275758D6BF5F32790EF15D8416BF5548164859E785DB45E7787BB0E727ADE08641ED0000000866616B652D736967" # noqa
self.assertEqual(K, transport._K)
self.assertEqual(H, hexlify(transport._H).upper())
self.assertEqual(x, hexlify(transport._message.asbytes()).upper())
self.assertTrue(transport._activated)
def test_kex_group16_sha512_client(self):
transport = FakeTransport()
transport.server_mode = False
kex = KexGroup16SHA512(transport)
kex.start_kex()
x = b"1E0000020100859FF55A23E0F66463561DD8BFC4764C69C05F85665B06EC9E29EF5003A53A8FA890B6A6EB624DEB55A4FB279DE7010A53580A126817E3D235B05A1081662B1500961D0625F0AAD287F1B597CBA9DB9550D9CC26355C4C59F92E613B5C21AC191F152C09A5DB46DCBA5EA58E3CA6A8B0EB7183E27FAC10106022E8521FA91240FB389060F1E1E4A355049D29DCC82921CE6588791743E4B1DEEE0166F7CC5180C3C75F3773342DF95C8C10AAA5D12975257027936B99B3DED6E6E98CF27EADEAEAE04E7F0A28071F578646B985FCE28A59CEB36287CB65759BE0544D4C4018CDF03C9078FE9CA79ECA611CB6966899E6FD29BE0781491C659FE2380E0D99D50D9CFAAB94E61BE311779719C4C43C6D223AD3799C3915A9E55076A21152DBBF911D6594296D6ECDC1B6FA71997CD29DF987B80FCA7F36BB7F19863C72BBBF839746AFBF9A5B407D468C976AA3E36FA118D3EAAD2E08BF6AE219F81F2CE2BE946337F06CC09BBFABE938A4087E413921CBEC1965ED905999B83396ECA226110CDF6EFB80F815F6489AF87561DA3857F13A7705921306D94176231FBB336B17C3724BC17A28BECB910093AB040873D5D760E8C182B88ECCE3E38DDA68CE35BD152DF7550BD908791FCCEDD1FFDF5ED2A57FFAE79599E487A7726D8A3D950B1729A08FBB60EE462A6BBE8BF0F5F0E1358129A37840FE5B3EEB8BF26E99FA222EAE83" # noqa
self.assertEqual(x, hexlify(transport._message.asbytes()).upper())
self.assertEqual(
(paramiko.kex_group1._MSG_KEXDH_REPLY,), transport._expect
)
# fake "reply"
msg = Message()
msg.add_string("fake-host-key")
msg.add_mpint(69)
msg.add_string("fake-sig")
msg.rewind()
kex.parse_next(paramiko.kex_group1._MSG_KEXDH_REPLY, msg)
K = 933242830095376162107925500057692534838883186615567574891154103836907630698358649443101764908667358576734565553213003142941996368306996312915844839972197961603283544950658467545799914435739152351344917376359963584614213874232577733869049670230112638724993540996854599166318001059065780674008011575015459772051180901213815080343343801745386220342919837913506966863570473712948197760657442974564354432738520446202131551650771882909329069340612274196233658123593466135642819578182367229641847749149740891990379052266213711500434128970973602206842980669193719602075489724202241641553472106310932258574377789863734311328542715212248147206865762697424822447603031087553480483833829498375309975229907460562402877655519980113688369262871485777790149373908739910846630414678346163764464587129010141922982925829457954376352735653834300282864445132624993186496129911208133529828461690634463092007726349795944930302881758403402084584307180896465875803621285362317770276493727205689466142632599776710824902573926951951209239626732358074877997756011804454926541386215567756538832824717436605031489511654178384081883801272314328403020205577714999460724519735573055540814037716770051316113795603990199374791348798218428912977728347485489266146775472 # noqa
H = b"F6E2BCC846B9B62591EFB86663D55D4769CA06B2EDABE469DF831639B2DDD5A271985011900A724CB2C87F19F347B3632A7C1536AF3D12EE463E6EA75281AF0C" # noqa
self.assertEqual(K, transport._K)
self.assertEqual(H, hexlify(transport._H).upper())
self.assertEqual((b"fake-host-key", b"fake-sig"), transport._verify)
self.assertTrue(transport._activated)
def test_kex_group16_sha512_server(self):
transport = FakeTransport()
transport.server_mode = True
kex = KexGroup16SHA512(transport)
kex.start_kex()
self.assertEqual(
(paramiko.kex_group1._MSG_KEXDH_INIT,), transport._expect
)
msg = Message()
msg.add_mpint(69)
msg.rewind()
kex.parse_next(paramiko.kex_group1._MSG_KEXDH_INIT, msg)
K = 933242830095376162107925500057692534838883186615567574891154103836907630698358649443101764908667358576734565553213003142941996368306996312915844839972197961603283544950658467545799914435739152351344917376359963584614213874232577733869049670230112638724993540996854599166318001059065780674008011575015459772051180901213815080343343801745386220342919837913506966863570473712948197760657442974564354432738520446202131551650771882909329069340612274196233658123593466135642819578182367229641847749149740891990379052266213711500434128970973602206842980669193719602075489724202241641553472106310932258574377789863734311328542715212248147206865762697424822447603031087553480483833829498375309975229907460562402877655519980113688369262871485777790149373908739910846630414678346163764464587129010141922982925829457954376352735653834300282864445132624993186496129911208133529828461690634463092007726349795944930302881758403402084584307180896465875803621285362317770276493727205689466142632599776710824902573926951951209239626732358074877997756011804454926541386215567756538832824717436605031489511654178384081883801272314328403020205577714999460724519735573055540814037716770051316113795603990199374791348798218428912977728347485489266146775472 # noqa
H = b"F97BB05A572A663688CA7EA1AA812D3C82EE6C8FA9D4B1D69435783D931157F199909EA38B003E4E4385C8861183CBFF0CF0EF1433A8B3C69AB4DD9420FCC85F" # noqa
x = b"1F0000000866616B652D6B65790000020100859FF55A23E0F66463561DD8BFC4764C69C05F85665B06EC9E29EF5003A53A8FA890B6A6EB624DEB55A4FB279DE7010A53580A126817E3D235B05A1081662B1500961D0625F0AAD287F1B597CBA9DB9550D9CC26355C4C59F92E613B5C21AC191F152C09A5DB46DCBA5EA58E3CA6A8B0EB7183E27FAC10106022E8521FA91240FB389060F1E1E4A355049D29DCC82921CE6588791743E4B1DEEE0166F7CC5180C3C75F3773342DF95C8C10AAA5D12975257027936B99B3DED6E6E98CF27EADEAEAE04E7F0A28071F578646B985FCE28A59CEB36287CB65759BE0544D4C4018CDF03C9078FE9CA79ECA611CB6966899E6FD29BE0781491C659FE2380E0D99D50D9CFAAB94E61BE311779719C4C43C6D223AD3799C3915A9E55076A21152DBBF911D6594296D6ECDC1B6FA71997CD29DF987B80FCA7F36BB7F19863C72BBBF839746AFBF9A5B407D468C976AA3E36FA118D3EAAD2E08BF6AE219F81F2CE2BE946337F06CC09BBFABE938A4087E413921CBEC1965ED905999B83396ECA226110CDF6EFB80F815F6489AF87561DA3857F13A7705921306D94176231FBB336B17C3724BC17A28BECB910093AB040873D5D760E8C182B88ECCE3E38DDA68CE35BD152DF7550BD908791FCCEDD1FFDF5ED2A57FFAE79599E487A7726D8A3D950B1729A08FBB60EE462A6BBE8BF0F5F0E1358129A37840FE5B3EEB8BF26E99FA222EAE830000000866616B652D736967" # noqa
self.assertEqual(K, transport._K)
self.assertEqual(H, hexlify(transport._H).upper())
self.assertEqual(x, hexlify(transport._message.asbytes()).upper())
self.assertTrue(transport._activated)
@pytest.mark.skipif("not KexCurve25519.is_available()")
def test_kex_c25519_client(self):
K = 71294722834835117201316639182051104803802881348227506835068888449366462300724 # noqa
transport = FakeTransport()
transport.server_mode = False
kex = KexCurve25519(transport)
kex.start_kex()
self.assertEqual(
(paramiko.kex_curve25519._MSG_KEXECDH_REPLY,), transport._expect
)
# fake reply
msg = Message()
msg.add_string("fake-host-key")
Q_S = unhexlify(
"8d13a119452382a1ada8eea4c979f3e63ad3f0c7366786d6c5b54b87219bae49"
)
msg.add_string(Q_S)
msg.add_string("fake-sig")
msg.rewind()
kex.parse_next(paramiko.kex_curve25519._MSG_KEXECDH_REPLY, msg)
H = b"05B6F6437C0CF38D1A6C5A6F6E2558DEB54E7FC62447EBFB1E5D7407326A5475"
self.assertEqual(K, kex.transport._K)
self.assertEqual(H, hexlify(transport._H).upper())
self.assertEqual((b"fake-host-key", b"fake-sig"), transport._verify)
self.assertTrue(transport._activated)
@pytest.mark.skipif("not KexCurve25519.is_available()")
def test_kex_c25519_server(self):
K = 71294722834835117201316639182051104803802881348227506835068888449366462300724 # noqa
transport = FakeTransport()
transport.server_mode = True
kex = KexCurve25519(transport)
kex.start_kex()
self.assertEqual(
(paramiko.kex_curve25519._MSG_KEXECDH_INIT,), transport._expect
)
# fake init
msg = Message()
Q_C = unhexlify(
"8d13a119452382a1ada8eea4c979f3e63ad3f0c7366786d6c5b54b87219bae49"
)
H = b"DF08FCFCF31560FEE639D9B6D56D760BC3455B5ADA148E4514181023E7A9B042"
msg.add_string(Q_C)
msg.rewind()
kex.parse_next(paramiko.kex_curve25519._MSG_KEXECDH_INIT, msg)
self.assertEqual(K, transport._K)
self.assertTrue(transport._activated)
self.assertEqual(H, hexlify(transport._H).upper())
| 36,976 | Python | .py | 591 | 53.901861 | 1,253 | 0.769032 | paramiko/paramiko | 9,017 | 1,999 | 1,074 | LGPL-2.1 | 9/5/2024, 5:07:36 PM (Europe/Amsterdam) |
559 | test_sftp.py | paramiko_paramiko/tests/test_sftp.py | # Copyright (C) 2003-2009 Robey Pointer <[email protected]>
#
# This file is part of paramiko.
#
# Paramiko is free software; you can redistribute it and/or modify it under the
# terms of the GNU Lesser General Public License as published by the Free
# Software Foundation; either version 2.1 of the License, or (at your option)
# any later version.
#
# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Paramiko; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
"""
Some unit tests to make sure SFTP works.
A real SFTP server is contacted, and a new folder is created there to
do test file operations in (so no existing files will be harmed).
"""
import os
import socket
import sys
import warnings
from binascii import hexlify
from io import StringIO
from tempfile import mkstemp
import pytest
from paramiko.common import o777, o600, o666, o644
from paramiko.sftp_attr import SFTPAttributes
from paramiko.util import b, u
from tests import requireNonAsciiLocale
from ._util import needs_builtin
from ._util import slow
ARTICLE = """
Insulin sensitivity and liver insulin receptor structure in ducks from two
genera
T. Constantine, B. Chevalier, M. Derouet and J. Simon

Station de Recherches Avicoles, Institut National de la Recherche Agronomique,
Nouzilly, France.

Insulin sensitivity and liver insulin receptor structure were studied in
5-wk-old ducks from two genera (Muscovy and Pekin). In the fasting state, both
duck types were equally resistant to exogenous insulin compared with chicken.
Despite the low potency of duck insulin, the number of insulin receptors was
lower in Muscovy duck and similar in Pekin duck and chicken liver membranes.
After 125I-insulin cross-linking, the size of the alpha-subunit of the
receptors from the three species was 135,000. Wheat germ agglutinin-purified
receptors from the three species were contaminated by an active and unusual
adenosinetriphosphatase (ATPase) contaminant (highest activity in Muscovy
duck). Sequential purification of solubilized receptor from both duck types on
lentil and then wheat germ agglutinin lectins led to a fraction of receptors
very poor in ATPase activity that exhibited a beta-subunit size (95,000) and
tyrosine kinase activity similar to those of ATPase-free chicken insulin
receptors. Therefore the ducks from the two genera exhibit an alpha-beta-
structure for liver insulin receptors and a clear difference in the number of
liver insulin receptors. Their sensitivity to insulin is, however, similarly
decreased compared with chicken.
"""
# Here is how unicode characters are encoded over 1 to 6 bytes in utf-8
# U-00000000 - U-0000007F:
# 0xxxxxxx
# U-00000080 - U-000007FF:
# 110xxxxx 10xxxxxx
# U-00000800 - U-0000FFFF:
# 1110xxxx 10xxxxxx 10xxxxxx
# U-00010000 - U-001FFFFF:
# 11110xxx 10xxxxxx 10xxxxxx 10xxxxxx
# U-00200000 - U-03FFFFFF:
# 111110xx 10xxxxxx 10xxxxxx 10xxxxxx 10xxxxxx
# U-04000000 - U-7FFFFFFF:
# 1111110x 10xxxxxx 10xxxxxx 10xxxxxx 10xxxxxx 10xxxxxx
# Note that: hex(int('11000011',2)) == '0xc3'
# Thus, the following 2-bytes sequence is not valid utf8: "invalid continuation
# byte"
NON_UTF8_DATA = b"\xC3\xC3"
unicode_folder = "\u00fcnic\u00f8de"
utf8_folder = b"/\xc3\xbcnic\xc3\xb8\x64\x65"
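# Editor's doctest-style check of the comment block above: 0xC3 opens a
# two-byte sequence, so a second 0xC3 (not of the form 10xxxxxx) cannot
# complete it.
#
#     >>> NON_UTF8_DATA.decode("utf-8")
#     Traceback (most recent call last):
#         ...
#     UnicodeDecodeError: 'utf-8' codec can't decode byte 0xc3 in position 0:
#     invalid continuation byte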
@slow
class TestSFTP:
def test_file(self, sftp):
"""
verify that we can create a file.
"""
f = sftp.open(sftp.FOLDER + "/test", "w")
try:
assert f.stat().st_size == 0
finally:
f.close()
sftp.remove(sftp.FOLDER + "/test")
def test_close(self, sftp):
"""
Verify that SFTP session close() causes a socket error on next action.
"""
sftp.close()
with pytest.raises(socket.error, match="Socket is closed"):
sftp.open(sftp.FOLDER + "/test2", "w")
def test_sftp_can_be_used_as_context_manager(self, sftp):
"""
verify that the sftp session is closed when exiting the context manager
"""
with sftp:
pass
with pytest.raises(socket.error, match="Socket is closed"):
sftp.open(sftp.FOLDER + "/test2", "w")
def test_write(self, sftp):
"""
verify that a file can be created and written, and the size is correct.
"""
try:
with sftp.open(sftp.FOLDER + "/duck.txt", "w") as f:
f.write(ARTICLE)
assert sftp.stat(sftp.FOLDER + "/duck.txt").st_size == 1486
finally:
sftp.remove(sftp.FOLDER + "/duck.txt")
def test_sftp_file_can_be_used_as_context_manager(self, sftp):
"""
verify that an opened file can be used as a context manager
"""
try:
with sftp.open(sftp.FOLDER + "/duck.txt", "w") as f:
f.write(ARTICLE)
assert sftp.stat(sftp.FOLDER + "/duck.txt").st_size == 1486
finally:
sftp.remove(sftp.FOLDER + "/duck.txt")
def test_append(self, sftp):
"""
verify that a file can be opened for append, and tell() still works.
"""
try:
with sftp.open(sftp.FOLDER + "/append.txt", "w") as f:
f.write("first line\nsecond line\n")
assert f.tell() == 23
with sftp.open(sftp.FOLDER + "/append.txt", "a+") as f:
f.write("third line!!!\n")
assert f.tell() == 37
assert f.stat().st_size == 37
f.seek(-26, f.SEEK_CUR)
assert f.readline() == "second line\n"
finally:
sftp.remove(sftp.FOLDER + "/append.txt")
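    # Editor's note on the offsets above: "first line\nsecond line\n" is
    # 11 + 12 = 23 bytes; appending "third line!!!\n" adds 14 more (37);
    # and seek(-26, SEEK_CUR) from offset 37 lands on byte 11, the start
    # of "second line\n".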
def test_rename(self, sftp):
"""
verify that renaming a file works.
"""
try:
with sftp.open(sftp.FOLDER + "/first.txt", "w") as f:
f.write("content!\n")
sftp.rename(
sftp.FOLDER + "/first.txt", sftp.FOLDER + "/second.txt"
)
with pytest.raises(IOError, match="No such file"):
sftp.open(sftp.FOLDER + "/first.txt", "r")
with sftp.open(sftp.FOLDER + "/second.txt", "r") as f:
f.seek(-6, f.SEEK_END)
assert u(f.read(4)) == "tent"
finally:
# TODO: this is gross, make some sort of 'remove if possible' / 'rm
# -f' a-like, jeez
try:
sftp.remove(sftp.FOLDER + "/first.txt")
except:
pass
try:
sftp.remove(sftp.FOLDER + "/second.txt")
except:
pass
def testa_posix_rename(self, sftp):
"""Test [email protected] protocol extension."""
try:
# first check that the normal rename works as specified
with sftp.open(sftp.FOLDER + "/a", "w") as f:
f.write("one")
sftp.rename(sftp.FOLDER + "/a", sftp.FOLDER + "/b")
with sftp.open(sftp.FOLDER + "/a", "w") as f:
f.write("two")
with pytest.raises(IOError): # actual message seems generic
sftp.rename(sftp.FOLDER + "/a", sftp.FOLDER + "/b")
# now check with the posix_rename
sftp.posix_rename(sftp.FOLDER + "/a", sftp.FOLDER + "/b")
with sftp.open(sftp.FOLDER + "/b", "r") as f:
data = u(f.read())
err = "Contents of renamed file not the same as original file"
assert "two" == data, err
finally:
try:
sftp.remove(sftp.FOLDER + "/a")
except:
pass
try:
sftp.remove(sftp.FOLDER + "/b")
except:
pass
def test_folder(self, sftp):
"""
create a temporary folder, verify that we can create a file in it, then
remove the folder and verify that we can't create a file in it anymore.
"""
sftp.mkdir(sftp.FOLDER + "/subfolder")
sftp.open(sftp.FOLDER + "/subfolder/test", "w").close()
sftp.remove(sftp.FOLDER + "/subfolder/test")
sftp.rmdir(sftp.FOLDER + "/subfolder")
# shouldn't be able to create that file if dir removed
with pytest.raises(IOError, match="No such file"):
sftp.open(sftp.FOLDER + "/subfolder/test")
def test_listdir(self, sftp):
"""
verify that a folder can be created, a bunch of files can be placed in
it, and those files show up in sftp.listdir.
"""
try:
sftp.open(sftp.FOLDER + "/duck.txt", "w").close()
sftp.open(sftp.FOLDER + "/fish.txt", "w").close()
sftp.open(sftp.FOLDER + "/tertiary.py", "w").close()
x = sftp.listdir(sftp.FOLDER)
assert len(x) == 3
assert "duck.txt" in x
assert "fish.txt" in x
assert "tertiary.py" in x
assert "random" not in x
finally:
sftp.remove(sftp.FOLDER + "/duck.txt")
sftp.remove(sftp.FOLDER + "/fish.txt")
sftp.remove(sftp.FOLDER + "/tertiary.py")
def test_listdir_iter(self, sftp):
"""
listdir_iter version of above test
"""
try:
sftp.open(sftp.FOLDER + "/duck.txt", "w").close()
sftp.open(sftp.FOLDER + "/fish.txt", "w").close()
sftp.open(sftp.FOLDER + "/tertiary.py", "w").close()
x = [x.filename for x in sftp.listdir_iter(sftp.FOLDER)]
assert len(x) == 3
assert "duck.txt" in x
assert "fish.txt" in x
assert "tertiary.py" in x
assert "random" not in x
finally:
sftp.remove(sftp.FOLDER + "/duck.txt")
sftp.remove(sftp.FOLDER + "/fish.txt")
sftp.remove(sftp.FOLDER + "/tertiary.py")
@requireNonAsciiLocale()
def test_listdir_in_locale(self, sftp):
"""Test listdir under a locale that uses non-ascii text."""
sftp.open(sftp.FOLDER + "/canard.txt", "w").close()
try:
folder_contents = sftp.listdir(sftp.FOLDER)
assert ["canard.txt"] == folder_contents
finally:
sftp.remove(sftp.FOLDER + "/canard.txt")
def test_setstat(self, sftp):
"""
verify that the setstat functions (chown, chmod, utime, truncate) work.
"""
try:
with sftp.open(sftp.FOLDER + "/special", "w") as f:
f.write("x" * 1024)
stat = sftp.stat(sftp.FOLDER + "/special")
sftp.chmod(sftp.FOLDER + "/special", (stat.st_mode & ~o777) | o600)
stat = sftp.stat(sftp.FOLDER + "/special")
expected_mode = o600
if sys.platform == "win32":
# chmod not really functional on windows
expected_mode = o666
if sys.platform == "cygwin":
# even worse.
expected_mode = o644
assert stat.st_mode & o777 == expected_mode
assert stat.st_size == 1024
mtime = stat.st_mtime - 3600
atime = stat.st_atime - 1800
sftp.utime(sftp.FOLDER + "/special", (atime, mtime))
stat = sftp.stat(sftp.FOLDER + "/special")
assert stat.st_mtime == mtime
if sys.platform not in ("win32", "cygwin"):
assert stat.st_atime == atime
# can't really test chown, since we'd have to know a valid uid.
sftp.truncate(sftp.FOLDER + "/special", 512)
stat = sftp.stat(sftp.FOLDER + "/special")
assert stat.st_size == 512
finally:
sftp.remove(sftp.FOLDER + "/special")
def test_fsetstat(self, sftp):
"""
verify that the fsetstat functions (chown, chmod, utime, truncate)
work on open files.
"""
try:
with sftp.open(sftp.FOLDER + "/special", "w") as f:
f.write("x" * 1024)
with sftp.open(sftp.FOLDER + "/special", "r+") as f:
stat = f.stat()
f.chmod((stat.st_mode & ~o777) | o600)
stat = f.stat()
expected_mode = o600
if sys.platform == "win32":
# chmod not really functional on windows
expected_mode = o666
if sys.platform == "cygwin":
# even worse.
expected_mode = o644
assert stat.st_mode & o777 == expected_mode
assert stat.st_size == 1024
mtime = stat.st_mtime - 3600
atime = stat.st_atime - 1800
f.utime((atime, mtime))
stat = f.stat()
assert stat.st_mtime == mtime
if sys.platform not in ("win32", "cygwin"):
assert stat.st_atime == atime
# can't really test chown, since we'd have to know a valid uid.
f.truncate(512)
stat = f.stat()
assert stat.st_size == 512
finally:
sftp.remove(sftp.FOLDER + "/special")
def test_readline_seek(self, sftp):
"""
create a text file and write a bunch of text into it. then count the
lines in the file, and seek around to retrieve particular lines. this
should verify that read buffering and 'tell' work well together, and
that read buffering is reset on 'seek'.
"""
try:
with sftp.open(sftp.FOLDER + "/duck.txt", "w") as f:
f.write(ARTICLE)
with sftp.open(sftp.FOLDER + "/duck.txt", "r+") as f:
line_number = 0
loc = 0
pos_list = []
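                # record the byte offset at which each line begins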
for line in f:
line_number += 1
pos_list.append(loc)
loc = f.tell()
assert f.seekable()
f.seek(pos_list[6], f.SEEK_SET)
assert f.readline(), "Nouzilly == France.\n"
f.seek(pos_list[17], f.SEEK_SET)
assert f.readline()[:4] == "duck"
f.seek(pos_list[10], f.SEEK_SET)
expected = "duck types were equally resistant to exogenous insulin compared with chicken.\n" # noqa
assert f.readline() == expected
finally:
sftp.remove(sftp.FOLDER + "/duck.txt")
def test_write_seek(self, sftp):
"""
Create a text file, seek back, change it, and verify.
"""
try:
with sftp.open(sftp.FOLDER + "/testing.txt", "w") as f:
f.write("hello kitty.\n")
f.seek(-5, f.SEEK_CUR)
f.write("dd")
assert sftp.stat(sftp.FOLDER + "/testing.txt").st_size == 13
with sftp.open(sftp.FOLDER + "/testing.txt", "r") as f:
data = f.read(20)
assert data == b"hello kiddy.\n"
finally:
sftp.remove(sftp.FOLDER + "/testing.txt")
def test_symlink(self, sftp):
"""
create a symlink and then check that lstat doesn't follow it.
"""
if not hasattr(os, "symlink"):
# skip symlink tests on windows
return
try:
with sftp.open(sftp.FOLDER + "/original.txt", "w") as f:
f.write("original\n")
sftp.symlink("original.txt", sftp.FOLDER + "/link.txt")
assert sftp.readlink(sftp.FOLDER + "/link.txt") == "original.txt"
with sftp.open(sftp.FOLDER + "/link.txt", "r") as f:
assert f.readlines() == ["original\n"]
cwd = sftp.normalize(".")
if cwd[-1] == "/":
cwd = cwd[:-1]
abs_path = cwd + "/" + sftp.FOLDER + "/original.txt"
sftp.symlink(abs_path, sftp.FOLDER + "/link2.txt")
assert abs_path == sftp.readlink(sftp.FOLDER + "/link2.txt")
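            # lstat reports the link itself (len("original.txt") == 12);
            # stat follows it to the 9-byte target file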
assert sftp.lstat(sftp.FOLDER + "/link.txt").st_size == 12
assert sftp.stat(sftp.FOLDER + "/link.txt").st_size == 9
# the sftp server may be hiding extra path members from us, so the
# length may be longer than we expect:
assert sftp.lstat(sftp.FOLDER + "/link2.txt").st_size >= len(
abs_path
)
assert sftp.stat(sftp.FOLDER + "/link2.txt").st_size == 9
assert sftp.stat(sftp.FOLDER + "/original.txt").st_size == 9
finally:
try:
sftp.remove(sftp.FOLDER + "/link.txt")
except:
pass
try:
sftp.remove(sftp.FOLDER + "/link2.txt")
except:
pass
try:
sftp.remove(sftp.FOLDER + "/original.txt")
except:
pass
def test_flush_seek(self, sftp):
"""
verify that buffered writes are automatically flushed on seek.
"""
try:
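            # bufsize=1 means line-buffered: "partial" sits in the write
            # buffer until the seek() below forces a flush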
with sftp.open(sftp.FOLDER + "/happy.txt", "w", 1) as f:
f.write("full line.\n")
f.write("partial")
f.seek(9, f.SEEK_SET)
f.write("?\n")
with sftp.open(sftp.FOLDER + "/happy.txt", "r") as f:
assert f.readline() == u("full line?\n")
assert f.read(7) == b"partial"
finally:
try:
sftp.remove(sftp.FOLDER + "/happy.txt")
except:
pass
def test_realpath(self, sftp):
"""
test that realpath is returning something non-empty and not an
error.
"""
pwd = sftp.normalize(".")
assert len(pwd) > 0
f = sftp.normalize("./" + sftp.FOLDER)
assert len(f) > 0
assert os.path.join(pwd, sftp.FOLDER) == f
def test_mkdir(self, sftp):
"""
verify that mkdir/rmdir work.
"""
sftp.mkdir(sftp.FOLDER + "/subfolder")
with pytest.raises(IOError): # generic msg only
sftp.mkdir(sftp.FOLDER + "/subfolder")
sftp.rmdir(sftp.FOLDER + "/subfolder")
with pytest.raises(IOError, match="No such file"):
sftp.rmdir(sftp.FOLDER + "/subfolder")
def test_chdir(self, sftp):
"""
verify that chdir/getcwd work.
"""
root = sftp.normalize(".")
if root[-1] != "/":
root += "/"
try:
sftp.mkdir(sftp.FOLDER + "/alpha")
sftp.chdir(sftp.FOLDER + "/alpha")
sftp.mkdir("beta")
assert root + sftp.FOLDER + "/alpha" == sftp.getcwd()
assert ["beta"] == sftp.listdir(".")
sftp.chdir("beta")
with sftp.open("fish", "w") as f:
f.write("hello\n")
sftp.chdir("..")
assert ["fish"] == sftp.listdir("beta")
sftp.chdir("..")
assert ["fish"] == sftp.listdir("alpha/beta")
finally:
sftp.chdir(root)
try:
sftp.unlink(sftp.FOLDER + "/alpha/beta/fish")
except:
pass
try:
sftp.rmdir(sftp.FOLDER + "/alpha/beta")
except:
pass
try:
sftp.rmdir(sftp.FOLDER + "/alpha")
except:
pass
def test_get_put(self, sftp):
"""
verify that get/put work.
"""
warnings.filterwarnings("ignore", "tempnam.*")
fd, localname = mkstemp()
os.close(fd)
text = b"All I wanted was a plastic bunny rabbit.\n"
with open(localname, "wb") as f:
f.write(text)
saved_progress = []
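        # put()/get() call the callback with (bytes transferred, total bytes)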
def progress_callback(x, y):
saved_progress.append((x, y))
sftp.put(localname, sftp.FOLDER + "/bunny.txt", progress_callback)
with sftp.open(sftp.FOLDER + "/bunny.txt", "rb") as f:
assert text == f.read(128)
assert [(41, 41)] == saved_progress
os.unlink(localname)
fd, localname = mkstemp()
os.close(fd)
saved_progress = []
sftp.get(sftp.FOLDER + "/bunny.txt", localname, progress_callback)
with open(localname, "rb") as f:
assert text == f.read(128)
assert [(41, 41)] == saved_progress
os.unlink(localname)
sftp.unlink(sftp.FOLDER + "/bunny.txt")
def test_get_without_prefetch(self, sftp):
"""
        Create a 4MB file and verify that get() works, without prefetching,
        on this larger file.
"""
sftp_filename = sftp.FOLDER + "/dummy_file"
num_chars = 1024 * 1024 * 4
fd, localname = mkstemp()
os.close(fd)
with open(localname, "wb") as f:
f.write(b"0" * num_chars)
sftp.put(localname, sftp_filename)
os.unlink(localname)
fd, localname = mkstemp()
os.close(fd)
sftp.get(sftp_filename, localname, prefetch=False)
assert os.stat(localname).st_size == num_chars
os.unlink(localname)
sftp.unlink(sftp_filename)
def test_check(self, sftp):
"""
verify that file.check() works against our own server.
        (it's an sftp extension that we support, and we may be the only
        implementation that supports it.)
"""
with sftp.open(sftp.FOLDER + "/kitty.txt", "w") as f:
f.write("here kitty kitty" * 64)
try:
with sftp.open(sftp.FOLDER + "/kitty.txt", "r") as f:
sum = f.check("sha1")
assert (
"91059CFC6615941378D413CB5ADAF4C5EB293402"
== u(hexlify(sum)).upper()
)
sum = f.check("md5", 0, 512)
assert (
"93DE4788FCA28D471516963A1FE3856A"
== u(hexlify(sum)).upper()
)
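                # block_size=510 hashes the 1024-byte file in three chunks
                # (510 + 510 + 4) and concatenates the per-block MD5 digests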
sum = f.check("md5", 0, 0, 510)
expected = "EB3B45B8CD55A0707D99B177544A319F373183D241432BB2157AB9E46358C4AC90370B5CADE5D90336FC1716F90B36D6" # noqa
assert u(hexlify(sum)).upper() == expected
finally:
sftp.unlink(sftp.FOLDER + "/kitty.txt")
def test_x_flag(self, sftp):
"""
verify that the 'x' flag works when opening a file.
"""
sftp.open(sftp.FOLDER + "/unusual.txt", "wx").close()
try:
with pytest.raises(IOError):
sftp.open(sftp.FOLDER + "/unusual.txt", "wx")
finally:
sftp.unlink(sftp.FOLDER + "/unusual.txt")
def test_utf8(self, sftp):
"""
verify that unicode strings are encoded into utf8 correctly.
"""
with sftp.open(sftp.FOLDER + "/something", "w") as f:
f.write("okay")
try:
sftp.rename(
sftp.FOLDER + "/something", sftp.FOLDER + "/" + unicode_folder
)
sftp.open(b(sftp.FOLDER) + utf8_folder, "r")
finally:
sftp.unlink(b(sftp.FOLDER) + utf8_folder)
def test_utf8_chdir(self, sftp):
sftp.mkdir(sftp.FOLDER + "/" + unicode_folder)
try:
sftp.chdir(sftp.FOLDER + "/" + unicode_folder)
with sftp.open("something", "w") as f:
f.write("okay")
sftp.unlink("something")
finally:
sftp.chdir()
sftp.rmdir(sftp.FOLDER + "/" + unicode_folder)
def test_bad_readv(self, sftp):
"""
verify that readv at the end of the file doesn't essplode.
"""
sftp.open(sftp.FOLDER + "/zero", "w").close()
try:
with sftp.open(sftp.FOLDER + "/zero", "r") as f:
f.readv([(0, 12)])
with sftp.open(sftp.FOLDER + "/zero", "r") as f:
file_size = f.stat().st_size
f.prefetch(file_size)
f.read(100)
finally:
sftp.unlink(sftp.FOLDER + "/zero")
def test_put_without_confirm(self, sftp):
"""
        verify that put works without confirmation.
"""
warnings.filterwarnings("ignore", "tempnam.*")
fd, localname = mkstemp()
os.close(fd)
text = b"All I wanted was a plastic bunny rabbit.\n"
with open(localname, "wb") as f:
f.write(text)
saved_progress = []
def progress_callback(x, y):
saved_progress.append((x, y))
res = sftp.put(
localname, sftp.FOLDER + "/bunny.txt", progress_callback, False
)
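        # with confirm=False, put() skips the trailing stat(), so the
        # returned SFTPAttributes carries no attributes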
assert SFTPAttributes().attr == res.attr
with sftp.open(sftp.FOLDER + "/bunny.txt", "r") as f:
assert text == f.read(128)
assert (41, 41) == saved_progress[-1]
os.unlink(localname)
sftp.unlink(sftp.FOLDER + "/bunny.txt")
def test_getcwd(self, sftp):
"""
verify that chdir/getcwd work.
"""
assert sftp.getcwd() is None
root = sftp.normalize(".")
if root[-1] != "/":
root += "/"
try:
sftp.mkdir(sftp.FOLDER + "/alpha")
sftp.chdir(sftp.FOLDER + "/alpha")
assert sftp.getcwd() == "/" + sftp.FOLDER + "/alpha"
finally:
sftp.chdir(root)
try:
sftp.rmdir(sftp.FOLDER + "/alpha")
except:
pass
def test_seek_append(self, sftp):
"""
verify that seek doesn't affect writes during append.
does not work except through paramiko. :( openssh fails.
"""
try:
with sftp.open(sftp.FOLDER + "/append.txt", "a") as f:
f.write("first line\nsecond line\n")
f.seek(11, f.SEEK_SET)
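                # in append mode every write lands at EOF, so this seek must
                # not relocate the next write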
f.write("third line\n")
with sftp.open(sftp.FOLDER + "/append.txt", "r") as f:
assert f.stat().st_size == 34
assert f.readline() == "first line\n"
assert f.readline() == "second line\n"
assert f.readline() == "third line\n"
finally:
sftp.remove(sftp.FOLDER + "/append.txt")
def test_putfo_empty_file(self, sftp):
"""
Send an empty file and confirm it is sent.
"""
target = sftp.FOLDER + "/empty file.txt"
stream = StringIO()
try:
attrs = sftp.putfo(stream, target)
# the returned attributes should not be null
assert attrs is not None
finally:
sftp.remove(target)
# TODO: this test doesn't actually fail if the regression (removing '%'
# expansion to '%%' within sftp.py's def _log()) is removed - stacktraces
# appear but they're clearly emitted from subthreads that have no error
# handling. No point running it until that is fixed somehow.
@pytest.mark.skip("Doesn't prove anything right now")
def test_file_with_percent(self, sftp):
"""
verify that we can create a file with a '%' in the filename.
        (it needs to be properly escaped by _log().)
"""
f = sftp.open(sftp.FOLDER + "/test%file", "w")
try:
assert f.stat().st_size == 0
finally:
f.close()
sftp.remove(sftp.FOLDER + "/test%file")
def test_non_utf8_data(self, sftp):
"""Test write() and read() of non utf8 data"""
try:
with sftp.open(f"{sftp.FOLDER}/nonutf8data", "w") as f:
f.write(NON_UTF8_DATA)
with sftp.open(f"{sftp.FOLDER}/nonutf8data", "r") as f:
data = f.read()
assert data == NON_UTF8_DATA
with sftp.open(f"{sftp.FOLDER}/nonutf8data", "wb") as f:
f.write(NON_UTF8_DATA)
with sftp.open(f"{sftp.FOLDER}/nonutf8data", "rb") as f:
data = f.read()
assert data == NON_UTF8_DATA
finally:
sftp.remove(f"{sftp.FOLDER}/nonutf8data")
@requireNonAsciiLocale("LC_TIME")
def test_sftp_attributes_locale_time(self, sftp):
"""Test SFTPAttributes under a locale with non-ascii time strings."""
some_stat = os.stat(sftp.FOLDER)
sftp_attributes = SFTPAttributes.from_stat(some_stat, u("a_directory"))
assert b"a_directory" in sftp_attributes.asbytes()
def test_sftp_attributes_empty_str(self, sftp):
sftp_attributes = SFTPAttributes()
assert (
str(sftp_attributes)
== "?--------- 1 0 0 0 (unknown date) ?"
)
@needs_builtin("buffer")
def test_write_buffer(self, sftp):
"""Test write() using a buffer instance."""
data = 3 * b"A potentially large block of data to chunk up.\n"
try:
with sftp.open(f"{sftp.FOLDER}/write_buffer", "wb") as f:
for offset in range(0, len(data), 8):
f.write(buffer(data, offset, 8)) # noqa
with sftp.open(f"{sftp.FOLDER}/write_buffer", "rb") as f:
assert f.read() == data
finally:
sftp.remove(f"{sftp.FOLDER}/write_buffer")
@needs_builtin("memoryview")
def test_write_memoryview(self, sftp):
"""Test write() using a memoryview instance."""
data = 3 * b"A potentially large block of data to chunk up.\n"
try:
with sftp.open(f"{sftp.FOLDER}/write_memoryview", "wb") as f:
view = memoryview(data)
for offset in range(0, len(data), 8):
f.write(view[offset : offset + 8])
with sftp.open(f"{sftp.FOLDER}/write_memoryview", "rb") as f:
assert f.read() == data
finally:
sftp.remove(f"{sftp.FOLDER}/write_memoryview")
| 30,045 | Python | .py | 730 | 30.40137 | 133 | 0.553315 | paramiko/paramiko | 9,017 | 1,999 | 1,074 | LGPL-2.1 | 9/5/2024, 5:07:36 PM (Europe/Amsterdam) |
560 | test_packetizer.py | paramiko_paramiko/tests/test_packetizer.py | # Copyright (C) 2003-2009 Robey Pointer <[email protected]>
#
# This file is part of paramiko.
#
# Paramiko is free software; you can redistribute it and/or modify it under the
# terms of the GNU Lesser General Public License as published by the Free
# Software Foundation; either version 2.1 of the License, or (at your option)
# any later version.
#
# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Paramiko; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
"""
Some unit tests for the ssh2 protocol in Transport.
"""
import sys
import unittest
from hashlib import sha1
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives.ciphers import algorithms, Cipher, modes
from paramiko import Message, Packetizer, util
from paramiko.common import byte_chr, zero_byte
from ._loop import LoopSocket
x55 = byte_chr(0x55)
x1f = byte_chr(0x1F)
class PacketizerTest(unittest.TestCase):
def test_write(self):
rsock = LoopSocket()
wsock = LoopSocket()
rsock.link(wsock)
p = Packetizer(wsock)
p.set_log(util.get_logger("paramiko.transport"))
p.set_hexdump(True)
encryptor = Cipher(
algorithms.AES(zero_byte * 16),
modes.CBC(x55 * 16),
backend=default_backend(),
).encryptor()
p.set_outbound_cipher(encryptor, 16, sha1, 12, x1f * 20)
# message has to be at least 16 bytes long, so we'll have at least one
# block of data encrypted that contains zero random padding bytes
m = Message()
m.add_byte(byte_chr(100))
m.add_int(100)
m.add_int(1)
m.add_int(900)
p.send_message(m)
data = rsock.recv(100)
# 32 + 12 bytes of MAC = 44
self.assertEqual(44, len(data))
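        # with a fixed all-zero key and constant IV, the first 16-byte
        # AES-CBC block is deterministic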
self.assertEqual(
b"\x43\x91\x97\xbd\x5b\x50\xac\x25\x87\xc2\xc4\x6b\xc7\xe9\x38\xc0", # noqa
data[:16],
)
def test_read(self):
rsock = LoopSocket()
wsock = LoopSocket()
rsock.link(wsock)
p = Packetizer(rsock)
p.set_log(util.get_logger("paramiko.transport"))
p.set_hexdump(True)
decryptor = Cipher(
algorithms.AES(zero_byte * 16),
modes.CBC(x55 * 16),
backend=default_backend(),
).decryptor()
p.set_inbound_cipher(decryptor, 16, sha1, 12, x1f * 20)
wsock.send(
b"\x43\x91\x97\xbd\x5b\x50\xac\x25\x87\xc2\xc4\x6b\xc7\xe9\x38\xc0\x90\xd2\x16\x56\x0d\x71\x73\x61\x38\x7c\x4c\x3d\xfb\x97\x7d\xe2\x6e\x03\xb1\xa0\xc2\x1c\xd6\x41\x41\x4c\xb4\x59" # noqa
)
cmd, m = p.read_message()
self.assertEqual(100, cmd)
self.assertEqual(100, m.get_int())
self.assertEqual(1, m.get_int())
self.assertEqual(900, m.get_int())
def test_closed(self):
if sys.platform.startswith("win"): # no SIGALRM on windows
return
rsock = LoopSocket()
wsock = LoopSocket()
rsock.link(wsock)
p = Packetizer(wsock)
p.set_log(util.get_logger("paramiko.transport"))
p.set_hexdump(True)
encryptor = Cipher(
algorithms.AES(zero_byte * 16),
modes.CBC(x55 * 16),
backend=default_backend(),
).encryptor()
p.set_outbound_cipher(encryptor, 16, sha1, 12, x1f * 20)
# message has to be at least 16 bytes long, so we'll have at least one
# block of data encrypted that contains zero random padding bytes
m = Message()
m.add_byte(byte_chr(100))
m.add_int(100)
m.add_int(1)
m.add_int(900)
wsock.send = lambda x: 0
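        # simulate a dead peer: send() claims zero bytes were written, so
        # send_message() should raise EOFError; the SIGALRM timeout below
        # keeps a regression from hanging the suite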
from functools import wraps
import errno
import os
import signal
class TimeoutError(Exception):
def __init__(self, error_message):
if hasattr(errno, "ETIME"):
self.message = os.sterror(errno.ETIME)
else:
self.messaage = error_message
def timeout(seconds=1, error_message="Timer expired"):
def decorator(func):
def _handle_timeout(signum, frame):
raise TimeoutError(error_message)
def wrapper(*args, **kwargs):
signal.signal(signal.SIGALRM, _handle_timeout)
signal.alarm(seconds)
try:
result = func(*args, **kwargs)
finally:
signal.alarm(0)
return result
return wraps(func)(wrapper)
return decorator
send = timeout()(p.send_message)
self.assertRaises(EOFError, send, m)
| 5,095 | Python | .py | 129 | 30.565891 | 199 | 0.614514 | paramiko/paramiko | 9,017 | 1,999 | 1,074 | LGPL-2.1 | 9/5/2024, 5:07:36 PM (Europe/Amsterdam) |
561 | test_ssh_gss.py | paramiko_paramiko/tests/test_ssh_gss.py | # Copyright (C) 2003-2007 Robey Pointer <[email protected]>
# Copyright (C) 2013-2014 science + computing ag
# Author: Sebastian Deiss <[email protected]>
#
#
# This file is part of paramiko.
#
# Paramiko is free software; you can redistribute it and/or modify it under the
# terms of the GNU Lesser General Public License as published by the Free
# Software Foundation; either version 2.1 of the License, or (at your option)
# any later version.
#
# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Paramiko; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
"""
Unit Tests for the GSS-API / SSPI SSHv2 Authentication (gssapi-with-mic)
"""
import socket
import threading
import paramiko
from ._util import _support, needs_gssapi, KerberosTestCase, update_env
from .test_client import FINGERPRINTS
class NullServer(paramiko.ServerInterface):
def get_allowed_auths(self, username):
return "gssapi-with-mic,publickey"
def check_auth_gssapi_with_mic(
self, username, gss_authenticated=paramiko.AUTH_FAILED, cc_file=None
):
if gss_authenticated == paramiko.AUTH_SUCCESSFUL:
return paramiko.AUTH_SUCCESSFUL
return paramiko.AUTH_FAILED
def enable_auth_gssapi(self):
return True
def check_auth_publickey(self, username, key):
try:
expected = FINGERPRINTS[key.get_name()]
except KeyError:
return paramiko.AUTH_FAILED
else:
if key.get_fingerprint() == expected:
return paramiko.AUTH_SUCCESSFUL
return paramiko.AUTH_FAILED
def check_channel_request(self, kind, chanid):
return paramiko.OPEN_SUCCEEDED
def check_channel_exec_request(self, channel, command):
if command != b"yes":
return False
return True
@needs_gssapi
class GSSAuthTest(KerberosTestCase):
def setUp(self):
# TODO: username and targ_name should come from os.environ or whatever
# the approved pytest method is for runtime-configuring test data.
self.username = self.realm.user_princ
self.hostname = socket.getfqdn(self.realm.hostname)
self.sockl = socket.socket()
self.sockl.bind((self.realm.hostname, 0))
self.sockl.listen(1)
self.addr, self.port = self.sockl.getsockname()
self.event = threading.Event()
update_env(self, self.realm.env)
thread = threading.Thread(target=self._run)
thread.start()
def tearDown(self):
for attr in "tc ts socks sockl".split():
if hasattr(self, attr):
getattr(self, attr).close()
def _run(self):
self.socks, addr = self.sockl.accept()
self.ts = paramiko.Transport(self.socks)
host_key = paramiko.RSAKey.from_private_key_file(_support("rsa.key"))
self.ts.add_server_key(host_key)
server = NullServer()
self.ts.start_server(self.event, server)
def _test_connection(self, **kwargs):
"""
(Most) kwargs get passed directly into SSHClient.connect().
The exception is ... no exception yet
"""
host_key = paramiko.RSAKey.from_private_key_file(_support("rsa.key"))
public_host_key = paramiko.RSAKey(data=host_key.asbytes())
self.tc = paramiko.SSHClient()
self.tc.set_missing_host_key_policy(paramiko.WarningPolicy())
self.tc.get_host_keys().add(
f"[{self.addr}]:{self.port}", "ssh-rsa", public_host_key
)
self.tc.connect(
hostname=self.addr,
port=self.port,
username=self.username,
gss_host=self.hostname,
gss_auth=True,
**kwargs,
)
self.event.wait(1.0)
        self.assertTrue(self.event.is_set())
        self.assertTrue(self.ts.is_active())
        self.assertEqual(self.username, self.ts.get_username())
        self.assertEqual(True, self.ts.is_authenticated())
stdin, stdout, stderr = self.tc.exec_command("yes")
schan = self.ts.accept(1.0)
schan.send("Hello there.\n")
schan.send_stderr("This is on stderr.\n")
schan.close()
self.assertEquals("Hello there.\n", stdout.readline())
self.assertEquals("", stdout.readline())
self.assertEquals("This is on stderr.\n", stderr.readline())
self.assertEquals("", stderr.readline())
stdin.close()
stdout.close()
stderr.close()
def test_gss_auth(self):
"""
Verify that Paramiko can handle SSHv2 GSS-API / SSPI authentication
(gssapi-with-mic) in client and server mode.
"""
self._test_connection(allow_agent=False, look_for_keys=False)
def test_auth_trickledown(self):
"""
Failed gssapi-with-mic doesn't prevent subsequent key from succeeding
"""
self.hostname = (
"this_host_does_not_exists_and_causes_a_GSSAPI-exception"
)
self._test_connection(
key_filename=[_support("rsa.key")],
allow_agent=False,
look_for_keys=False,
)
| 5,468 | Python | .py | 135 | 32.918519 | 79 | 0.654861 | paramiko/paramiko | 9,017 | 1,999 | 1,074 | LGPL-2.1 | 9/5/2024, 5:07:36 PM (Europe/Amsterdam) |
562 | test_gssapi.py | paramiko_paramiko/tests/test_gssapi.py | # Copyright (C) 2013-2014 science + computing ag
# Author: Sebastian Deiss <[email protected]>
#
#
# This file is part of paramiko.
#
# Paramiko is free software; you can redistribute it and/or modify it under the
# terms of the GNU Lesser General Public License as published by the Free
# Software Foundation; either version 2.1 of the License, or (at your option)
# any later version.
#
# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Paramiko; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
"""
Test the used APIs for GSS-API / SSPI authentication
"""
import socket
from ._util import needs_gssapi, KerberosTestCase, update_env
#
# NOTE: KerberosTestCase skips all tests if it was unable to import k5test
# third-party library. That's the primary trigger for whether this module
# effectively gets run or not. See tests/util.py for other triggers (a set of
# env vars a human might have defined).
#
@needs_gssapi
class GSSAPITest(KerberosTestCase):
def setUp(self):
super().setUp()
# TODO: these vars should all come from os.environ or whatever the
# approved pytest method is for runtime-configuring test data.
self.krb5_mech = "1.2.840.113554.1.2.2"
self.targ_name = self.realm.hostname
self.server_mode = False
update_env(self, self.realm.env)
def test_pyasn1(self):
"""
Test the used methods of pyasn1.
"""
from pyasn1.type.univ import ObjectIdentifier
from pyasn1.codec.der import encoder, decoder
oid = encoder.encode(ObjectIdentifier(self.krb5_mech))
mech, __ = decoder.decode(oid)
        self.assertEqual(self.krb5_mech, str(mech))
def _gssapi_sspi_test(self):
"""
Test the used methods of python-gssapi or sspi, sspicon from pywin32.
"""
try:
import gssapi
if (
hasattr(gssapi, "__title__")
and gssapi.__title__ == "python-gssapi"
):
_API = "PYTHON-GSSAPI-OLD"
else:
_API = "PYTHON-GSSAPI-NEW"
except ImportError:
import sspicon
import sspi
_API = "SSPI"
c_token = None
gss_ctxt_status = False
mic_msg = b"G'day Mate!"
if _API == "PYTHON-GSSAPI-OLD":
if self.server_mode:
gss_flags = (
gssapi.C_PROT_READY_FLAG,
gssapi.C_INTEG_FLAG,
gssapi.C_MUTUAL_FLAG,
gssapi.C_DELEG_FLAG,
)
else:
gss_flags = (
gssapi.C_PROT_READY_FLAG,
gssapi.C_INTEG_FLAG,
gssapi.C_DELEG_FLAG,
)
# Initialize a GSS-API context.
ctx = gssapi.Context()
ctx.flags = gss_flags
krb5_oid = gssapi.OID.mech_from_string(self.krb5_mech)
target_name = gssapi.Name(
"host@" + self.targ_name, gssapi.C_NT_HOSTBASED_SERVICE
)
gss_ctxt = gssapi.InitContext(
peer_name=target_name, mech_type=krb5_oid, req_flags=ctx.flags
)
            if self.server_mode:
                c_token = gss_ctxt.step(c_token)
                gss_ctxt_status = gss_ctxt.established
                self.assertEqual(False, gss_ctxt_status)
                # Accept a GSS-API context.
                gss_srv_ctxt = gssapi.AcceptContext()
                s_token = gss_srv_ctxt.step(c_token)
                gss_ctxt_status = gss_srv_ctxt.established
                self.assertNotEqual(None, s_token)
                self.assertEqual(True, gss_ctxt_status)
                # Establish the client context
                c_token = gss_ctxt.step(s_token)
                self.assertEqual(None, c_token)
            else:
                while not gss_ctxt.established:
                    c_token = gss_ctxt.step(c_token)
                self.assertNotEqual(None, c_token)
            # Build MIC
            mic_token = gss_ctxt.get_mic(mic_msg)
            if self.server_mode:
                # Check MIC
                status = gss_srv_ctxt.verify_mic(mic_msg, mic_token)
                self.assertEqual(0, status)
elif _API == "PYTHON-GSSAPI-NEW":
if self.server_mode:
gss_flags = (
gssapi.RequirementFlag.protection_ready,
gssapi.RequirementFlag.integrity,
gssapi.RequirementFlag.mutual_authentication,
gssapi.RequirementFlag.delegate_to_peer,
)
else:
gss_flags = (
gssapi.RequirementFlag.protection_ready,
gssapi.RequirementFlag.integrity,
gssapi.RequirementFlag.delegate_to_peer,
)
# Initialize a GSS-API context.
krb5_oid = gssapi.MechType.kerberos
target_name = gssapi.Name(
"host@" + self.targ_name,
name_type=gssapi.NameType.hostbased_service,
)
gss_ctxt = gssapi.SecurityContext(
name=target_name,
flags=gss_flags,
mech=krb5_oid,
usage="initiate",
)
            if self.server_mode:
                c_token = gss_ctxt.step(c_token)
                gss_ctxt_status = gss_ctxt.complete
                self.assertEqual(False, gss_ctxt_status)
                # Accept a GSS-API context.
                gss_srv_ctxt = gssapi.SecurityContext(usage="accept")
                s_token = gss_srv_ctxt.step(c_token)
                gss_ctxt_status = gss_srv_ctxt.complete
                self.assertNotEqual(None, s_token)
                self.assertEqual(True, gss_ctxt_status)
                # Establish the client context
                c_token = gss_ctxt.step(s_token)
                self.assertEqual(None, c_token)
            else:
                while not gss_ctxt.complete:
                    c_token = gss_ctxt.step(c_token)
                self.assertNotEqual(None, c_token)
            # Build MIC
            mic_token = gss_ctxt.get_signature(mic_msg)
            if self.server_mode:
                # Check MIC
                status = gss_srv_ctxt.verify_signature(mic_msg, mic_token)
                self.assertEqual(0, status)
else:
gss_flags = (
sspicon.ISC_REQ_INTEGRITY
| sspicon.ISC_REQ_MUTUAL_AUTH
| sspicon.ISC_REQ_DELEGATE
)
# Initialize a GSS-API context.
target_name = "host/" + socket.getfqdn(self.targ_name)
gss_ctxt = sspi.ClientAuth(
"Kerberos", scflags=gss_flags, targetspn=target_name
)
            if self.server_mode:
                error, token = gss_ctxt.authorize(c_token)
                c_token = token[0].Buffer
                self.assertEqual(0, error)
                # Accept a GSS-API context.
                gss_srv_ctxt = sspi.ServerAuth("Kerberos", spn=target_name)
                error, token = gss_srv_ctxt.authorize(c_token)
                s_token = token[0].Buffer
                # Establish the context.
                error, token = gss_ctxt.authorize(s_token)
                c_token = token[0].Buffer
                self.assertEqual(None, c_token)
                self.assertEqual(0, error)
                # Build MIC
                mic_token = gss_ctxt.sign(mic_msg)
                # Check MIC
                gss_srv_ctxt.verify(mic_msg, mic_token)
            else:
                error, token = gss_ctxt.authorize(c_token)
                c_token = token[0].Buffer
                self.assertNotEqual(0, error)
def test_gssapi_sspi_client(self):
"""
Test the used methods of python-gssapi or sspi, sspicon from pywin32.
"""
self._gssapi_sspi_test()
def test_gssapi_sspi_server(self):
"""
Test the used methods of python-gssapi or sspi, sspicon from pywin32.
"""
self.server_mode = True
self._gssapi_sspi_test()
| 8,574 | Python | .py | 208 | 28.754808 | 79 | 0.557432 | paramiko/paramiko | 9,017 | 1,999 | 1,074 | LGPL-2.1 | 9/5/2024, 5:07:36 PM (Europe/Amsterdam) |
563 | test_transport.py | paramiko_paramiko/tests/test_transport.py | # Copyright (C) 2003-2009 Robey Pointer <[email protected]>
#
# This file is part of paramiko.
#
# Paramiko is free software; you can redistribute it and/or modify it under the
# terms of the GNU Lesser General Public License as published by the Free
# Software Foundation; either version 2.1 of the License, or (at your option)
# any later version.
#
# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Paramiko; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
"""
Some unit tests for the ssh2 protocol in Transport.
"""
from binascii import hexlify
import itertools
import select
import socket
import time
import threading
import random
import unittest
from unittest.mock import Mock
from paramiko import (
AuthHandler,
ChannelException,
IncompatiblePeer,
MessageOrderError,
Packetizer,
RSAKey,
SSHException,
SecurityOptions,
ServiceRequestingTransport,
Transport,
)
from paramiko.auth_handler import AuthOnlyHandler
from paramiko import OPEN_FAILED_ADMINISTRATIVELY_PROHIBITED
from paramiko.common import (
DEFAULT_MAX_PACKET_SIZE,
DEFAULT_WINDOW_SIZE,
MAX_WINDOW_SIZE,
MIN_PACKET_SIZE,
MIN_WINDOW_SIZE,
MSG_CHANNEL_OPEN,
MSG_DEBUG,
MSG_IGNORE,
MSG_KEXINIT,
MSG_UNIMPLEMENTED,
MSG_USERAUTH_SUCCESS,
byte_chr,
cMSG_CHANNEL_WINDOW_ADJUST,
cMSG_UNIMPLEMENTED,
)
from paramiko.message import Message
from ._util import (
needs_builtin,
_support,
requires_sha1_signing,
slow,
server,
_disable_sha2,
_disable_sha1,
TestServer as NullServer,
)
from ._loop import LoopSocket
from pytest import mark, raises
LONG_BANNER = """\
Welcome to the super-fun-land BBS, where our MOTD is the primary thing we
provide. All rights reserved. Offer void in Tennessee. Stunt drivers were
used. Do not attempt at home. Some restrictions apply.
Happy birthday to Commie the cat!
Note: An SSH banner may eventually appear.
Maybe.
"""
# Faux 'packet type' we do not implement and are unlikely ever to (but which is
# technically "within spec" re RFC 4251
MSG_FUGGEDABOUTIT = 253
class TransportTest(unittest.TestCase):
# TODO: this can get nuked once ServiceRequestingTransport becomes the
# only Transport, as it has this baked in.
_auth_handler_class = AuthHandler
def setUp(self):
self.socks = LoopSocket()
self.sockc = LoopSocket()
self.sockc.link(self.socks)
self.tc = Transport(self.sockc)
self.ts = Transport(self.socks)
def tearDown(self):
self.tc.close()
self.ts.close()
self.socks.close()
self.sockc.close()
# TODO: unify with newer contextmanager
def setup_test_server(
self, client_options=None, server_options=None, connect_kwargs=None
):
host_key = RSAKey.from_private_key_file(_support("rsa.key"))
public_host_key = RSAKey(data=host_key.asbytes())
self.ts.add_server_key(host_key)
if client_options is not None:
client_options(self.tc.get_security_options())
if server_options is not None:
server_options(self.ts.get_security_options())
event = threading.Event()
self.server = NullServer()
self.assertTrue(not event.is_set())
self.ts.start_server(event, self.server)
if connect_kwargs is None:
connect_kwargs = dict(
hostkey=public_host_key,
username="slowdive",
password="pygmalion",
)
self.tc.connect(**connect_kwargs)
event.wait(1.0)
self.assertTrue(event.is_set())
self.assertTrue(self.ts.is_active())
def test_security_options(self):
o = self.tc.get_security_options()
self.assertEqual(type(o), SecurityOptions)
self.assertTrue(("aes256-cbc", "aes192-cbc") != o.ciphers)
o.ciphers = ("aes256-cbc", "aes192-cbc")
self.assertEqual(("aes256-cbc", "aes192-cbc"), o.ciphers)
try:
o.ciphers = ("aes256-cbc", "made-up-cipher")
self.assertTrue(False)
except ValueError:
pass
try:
o.ciphers = 23
self.assertTrue(False)
except TypeError:
pass
def testb_security_options_reset(self):
o = self.tc.get_security_options()
# should not throw any exceptions
o.ciphers = o.ciphers
o.digests = o.digests
o.key_types = o.key_types
o.kex = o.kex
o.compression = o.compression
def test_compute_key(self):
self.tc.K = 123281095979686581523377256114209720774539068973101330872763622971399429481072519713536292772709507296759612401802191955568143056534122385270077606457721553469730659233569339356140085284052436697480759510519672848743794433460113118986816826624865291116513647975790797391795651716378444844877749505443714557929 # noqa
self.tc.H = b"\x0C\x83\x07\xCD\xE6\x85\x6F\xF3\x0B\xA9\x36\x84\xEB\x0F\x04\xC2\x52\x0E\x9E\xD3" # noqa
self.tc.session_id = self.tc.H
key = self.tc._compute_key("C", 32)
self.assertEqual(
b"207E66594CA87C44ECCBA3B3CD39FDDB378E6FDB0F97C54B2AA0CFBF900CD995", # noqa
hexlify(key).upper(),
)
def test_simple(self):
"""
verify that we can establish an ssh link with ourselves across the
loopback sockets. this is hardly "simple" but it's simpler than the
later tests. :)
"""
host_key = RSAKey.from_private_key_file(_support("rsa.key"))
public_host_key = RSAKey(data=host_key.asbytes())
self.ts.add_server_key(host_key)
event = threading.Event()
server = NullServer()
self.assertTrue(not event.is_set())
self.assertEqual(None, self.tc.get_username())
self.assertEqual(None, self.ts.get_username())
self.assertEqual(False, self.tc.is_authenticated())
self.assertEqual(False, self.ts.is_authenticated())
self.ts.start_server(event, server)
self.tc.connect(
hostkey=public_host_key, username="slowdive", password="pygmalion"
)
event.wait(1.0)
self.assertTrue(event.is_set())
self.assertTrue(self.ts.is_active())
self.assertEqual("slowdive", self.tc.get_username())
self.assertEqual("slowdive", self.ts.get_username())
self.assertEqual(True, self.tc.is_authenticated())
self.assertEqual(True, self.ts.is_authenticated())
def test_long_banner(self):
"""
verify that a long banner doesn't mess up the handshake.
"""
host_key = RSAKey.from_private_key_file(_support("rsa.key"))
public_host_key = RSAKey(data=host_key.asbytes())
self.ts.add_server_key(host_key)
event = threading.Event()
server = NullServer()
self.assertTrue(not event.is_set())
self.socks.send(LONG_BANNER)
self.ts.start_server(event, server)
self.tc.connect(
hostkey=public_host_key, username="slowdive", password="pygmalion"
)
event.wait(1.0)
self.assertTrue(event.is_set())
self.assertTrue(self.ts.is_active())
def test_special(self):
"""
verify that the client can demand odd handshake settings, and can
renegotiate keys in mid-stream.
"""
def force_algorithms(options):
options.ciphers = ("aes256-cbc",)
options.digests = ("hmac-md5-96",)
self.setup_test_server(client_options=force_algorithms)
self.assertEqual("aes256-cbc", self.tc.local_cipher)
self.assertEqual("aes256-cbc", self.tc.remote_cipher)
self.assertEqual(12, self.tc.packetizer.get_mac_size_out())
self.assertEqual(12, self.tc.packetizer.get_mac_size_in())
self.tc.send_ignore(1024)
self.tc.renegotiate_keys()
self.ts.send_ignore(1024)
@slow
def test_keepalive(self):
"""
verify that the keepalive will be sent.
"""
self.setup_test_server()
self.assertEqual(None, getattr(self.server, "_global_request", None))
self.tc.set_keepalive(1)
time.sleep(2)
self.assertEqual("[email protected]", self.server._global_request)
def test_exec_command(self):
"""
verify that exec_command() does something reasonable.
"""
self.setup_test_server()
chan = self.tc.open_session()
schan = self.ts.accept(1.0)
try:
chan.exec_command(
b"command contains \xfc and is not a valid UTF-8 string"
)
self.assertTrue(False)
except SSHException:
pass
chan = self.tc.open_session()
chan.exec_command("yes")
schan = self.ts.accept(1.0)
schan.send("Hello there.\n")
schan.send_stderr("This is on stderr.\n")
schan.close()
f = chan.makefile()
self.assertEqual("Hello there.\n", f.readline())
self.assertEqual("", f.readline())
f = chan.makefile_stderr()
self.assertEqual("This is on stderr.\n", f.readline())
self.assertEqual("", f.readline())
# now try it with combined stdout/stderr
chan = self.tc.open_session()
chan.exec_command("yes")
schan = self.ts.accept(1.0)
schan.send("Hello there.\n")
schan.send_stderr("This is on stderr.\n")
schan.close()
chan.set_combine_stderr(True)
f = chan.makefile()
self.assertEqual("Hello there.\n", f.readline())
self.assertEqual("This is on stderr.\n", f.readline())
self.assertEqual("", f.readline())
def test_channel_can_be_used_as_context_manager(self):
"""
verify that exec_command() does something reasonable.
"""
self.setup_test_server()
with self.tc.open_session() as chan:
with self.ts.accept(1.0) as schan:
chan.exec_command("yes")
schan.send("Hello there.\n")
schan.close()
f = chan.makefile()
self.assertEqual("Hello there.\n", f.readline())
self.assertEqual("", f.readline())
def test_invoke_shell(self):
"""
verify that invoke_shell() does something reasonable.
"""
self.setup_test_server()
chan = self.tc.open_session()
chan.invoke_shell()
schan = self.ts.accept(1.0)
chan.send("communist j. cat\n")
f = schan.makefile()
self.assertEqual("communist j. cat\n", f.readline())
chan.close()
self.assertEqual("", f.readline())
def test_channel_exception(self):
"""
verify that ChannelException is thrown for a bad open-channel request.
"""
self.setup_test_server()
try:
self.tc.open_channel("bogus")
self.fail("expected exception")
except ChannelException as e:
self.assertTrue(e.code == OPEN_FAILED_ADMINISTRATIVELY_PROHIBITED)
def test_exit_status(self):
"""
verify that get_exit_status() works.
"""
self.setup_test_server()
chan = self.tc.open_session()
schan = self.ts.accept(1.0)
chan.exec_command("yes")
schan.send("Hello there.\n")
self.assertTrue(not chan.exit_status_ready())
# trigger an EOF
schan.shutdown_read()
schan.shutdown_write()
schan.send_exit_status(23)
schan.close()
f = chan.makefile()
self.assertEqual("Hello there.\n", f.readline())
self.assertEqual("", f.readline())
count = 0
while not chan.exit_status_ready():
time.sleep(0.1)
count += 1
if count > 50:
raise Exception("timeout")
self.assertEqual(23, chan.recv_exit_status())
chan.close()
def test_select(self):
"""
verify that select() on a channel works.
"""
self.setup_test_server()
chan = self.tc.open_session()
chan.invoke_shell()
schan = self.ts.accept(1.0)
# nothing should be ready
r, w, e = select.select([chan], [], [], 0.1)
self.assertEqual([], r)
self.assertEqual([], w)
self.assertEqual([], e)
schan.send("hello\n")
# something should be ready now (give it 1 second to appear)
for i in range(10):
r, w, e = select.select([chan], [], [], 0.1)
if chan in r:
break
time.sleep(0.1)
self.assertEqual([chan], r)
self.assertEqual([], w)
self.assertEqual([], e)
self.assertEqual(b"hello\n", chan.recv(6))
# and, should be dead again now
r, w, e = select.select([chan], [], [], 0.1)
self.assertEqual([], r)
self.assertEqual([], w)
self.assertEqual([], e)
schan.close()
# detect eof?
for i in range(10):
r, w, e = select.select([chan], [], [], 0.1)
if chan in r:
break
time.sleep(0.1)
self.assertEqual([chan], r)
self.assertEqual([], w)
self.assertEqual([], e)
self.assertEqual(b"", chan.recv(16))
# make sure the pipe is still open for now...
p = chan._pipe
self.assertEqual(False, p._closed)
chan.close()
# ...and now is closed.
self.assertEqual(True, p._closed)
def test_renegotiate(self):
"""
verify that a transport can correctly renegotiate mid-stream.
"""
self.setup_test_server()
self.tc.packetizer.REKEY_BYTES = 16384
chan = self.tc.open_session()
chan.exec_command("yes")
schan = self.ts.accept(1.0)
self.assertEqual(self.tc.H, self.tc.session_id)
for i in range(20):
chan.send("x" * 1024)
chan.close()
# allow a few seconds for the rekeying to complete
for i in range(50):
if self.tc.H != self.tc.session_id:
break
time.sleep(0.1)
self.assertNotEqual(self.tc.H, self.tc.session_id)
schan.close()
def test_compression(self):
"""
verify that zlib compression is basically working.
"""
def force_compression(o):
o.compression = ("zlib",)
self.setup_test_server(force_compression, force_compression)
chan = self.tc.open_session()
chan.exec_command("yes")
schan = self.ts.accept(1.0)
bytes = self.tc.packetizer._Packetizer__sent_bytes
chan.send("x" * 1024)
bytes2 = self.tc.packetizer._Packetizer__sent_bytes
block_size = self.tc._cipher_info[self.tc.local_cipher]["block-size"]
mac_size = self.tc._mac_info[self.tc.local_mac]["size"]
# tests show this is actually compressed to *52 bytes*! including
# packet overhead! nice!! :)
self.assertTrue(bytes2 - bytes < 1024)
self.assertEqual(16 + block_size + mac_size, bytes2 - bytes)
chan.close()
schan.close()
def test_x11(self):
"""
verify that an x11 port can be requested and opened.
"""
self.setup_test_server()
chan = self.tc.open_session()
chan.exec_command("yes")
schan = self.ts.accept(1.0)
requested = []
def handler(c, addr_port):
addr, port = addr_port
requested.append((addr, port))
self.tc._queue_incoming_channel(c)
self.assertEqual(
None, getattr(self.server, "_x11_screen_number", None)
)
cookie = chan.request_x11(0, single_connection=True, handler=handler)
self.assertEqual(0, self.server._x11_screen_number)
self.assertEqual("MIT-MAGIC-COOKIE-1", self.server._x11_auth_protocol)
self.assertEqual(cookie, self.server._x11_auth_cookie)
self.assertEqual(True, self.server._x11_single_connection)
x11_server = self.ts.open_x11_channel(("localhost", 6093))
x11_client = self.tc.accept()
self.assertEqual("localhost", requested[0][0])
self.assertEqual(6093, requested[0][1])
x11_server.send("hello")
self.assertEqual(b"hello", x11_client.recv(5))
x11_server.close()
x11_client.close()
chan.close()
schan.close()
def test_reverse_port_forwarding(self):
"""
verify that a client can ask the server to open a reverse port for
forwarding.
"""
self.setup_test_server()
chan = self.tc.open_session()
chan.exec_command("yes")
self.ts.accept(1.0)
requested = []
def handler(c, origin_addr_port, server_addr_port):
requested.append(origin_addr_port)
requested.append(server_addr_port)
self.tc._queue_incoming_channel(c)
port = self.tc.request_port_forward("127.0.0.1", 0, handler)
self.assertEqual(port, self.server._listen.getsockname()[1])
cs = socket.socket()
cs.connect(("127.0.0.1", port))
ss, _ = self.server._listen.accept()
sch = self.ts.open_forwarded_tcpip_channel(
ss.getsockname(), ss.getpeername()
)
cch = self.tc.accept()
sch.send("hello")
self.assertEqual(b"hello", cch.recv(5))
sch.close()
cch.close()
ss.close()
cs.close()
# now cancel it.
self.tc.cancel_port_forward("127.0.0.1", port)
self.assertTrue(self.server._listen is None)
def test_port_forwarding(self):
"""
verify that a client can forward new connections from a locally-
forwarded port.
"""
self.setup_test_server()
chan = self.tc.open_session()
chan.exec_command("yes")
self.ts.accept(1.0)
# open a port on the "server" that the client will ask to forward to.
greeting_server = socket.socket()
greeting_server.bind(("127.0.0.1", 0))
greeting_server.listen(1)
greeting_port = greeting_server.getsockname()[1]
cs = self.tc.open_channel(
"direct-tcpip", ("127.0.0.1", greeting_port), ("", 9000)
)
sch = self.ts.accept(1.0)
cch = socket.socket()
cch.connect(self.server._tcpip_dest)
ss, _ = greeting_server.accept()
ss.send(b"Hello!\n")
ss.close()
sch.send(cch.recv(8192))
sch.close()
self.assertEqual(b"Hello!\n", cs.recv(7))
cs.close()
def test_stderr_select(self):
"""
verify that select() on a channel works even if only stderr is
receiving data.
"""
self.setup_test_server()
chan = self.tc.open_session()
chan.invoke_shell()
schan = self.ts.accept(1.0)
# nothing should be ready
r, w, e = select.select([chan], [], [], 0.1)
self.assertEqual([], r)
self.assertEqual([], w)
self.assertEqual([], e)
schan.send_stderr("hello\n")
# something should be ready now (give it 1 second to appear)
for i in range(10):
r, w, e = select.select([chan], [], [], 0.1)
if chan in r:
break
time.sleep(0.1)
self.assertEqual([chan], r)
self.assertEqual([], w)
self.assertEqual([], e)
self.assertEqual(b"hello\n", chan.recv_stderr(6))
# and, should be dead again now
r, w, e = select.select([chan], [], [], 0.1)
self.assertEqual([], r)
self.assertEqual([], w)
self.assertEqual([], e)
schan.close()
chan.close()
def test_send_ready(self):
"""
verify that send_ready() indicates when a send would not block.
"""
self.setup_test_server()
chan = self.tc.open_session()
chan.invoke_shell()
schan = self.ts.accept(1.0)
self.assertEqual(chan.send_ready(), True)
total = 0
K = "*" * 1024
limit = 1 + (64 * 2**15)
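        # one byte past the default window size (64 * 2**15), so an unread
        # channel must stop being send-ready before the loop finishes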
while total < limit:
chan.send(K)
total += len(K)
if not chan.send_ready():
break
self.assertTrue(total < limit)
schan.close()
chan.close()
self.assertEqual(chan.send_ready(), True)
def test_rekey_deadlock(self):
"""
Regression test for deadlock when in-transit messages are received
after MSG_KEXINIT is sent
Note: When this test fails, it may leak threads.
"""
# Test for an obscure deadlocking bug that can occur if we receive
# certain messages while initiating a key exchange.
#
# The deadlock occurs as follows:
#
# In the main thread:
# 1. The user's program calls Channel.send(), which sends
# MSG_CHANNEL_DATA to the remote host.
# 2. Packetizer discovers that REKEY_BYTES has been exceeded, and
# sets the __need_rekey flag.
#
# In the Transport thread:
# 3. Packetizer notices that the __need_rekey flag is set, and raises
# NeedRekeyException.
# 4. In response to NeedRekeyException, the transport thread sends
# MSG_KEXINIT to the remote host.
#
# On the remote host (using any SSH implementation):
# 5. The MSG_CHANNEL_DATA is received, and MSG_CHANNEL_WINDOW_ADJUST
# is sent.
# 6. The MSG_KEXINIT is received, and a corresponding MSG_KEXINIT is
# sent.
#
# In the main thread:
# 7. The user's program calls Channel.send().
# 8. Channel.send acquires Channel.lock, then calls
# Transport._send_user_message().
# 9. Transport._send_user_message waits for Transport.clear_to_send
# to be set (i.e., it waits for re-keying to complete).
# Channel.lock is still held.
#
# In the Transport thread:
# 10. MSG_CHANNEL_WINDOW_ADJUST is received; Channel._window_adjust
# is called to handle it.
# 11. Channel._window_adjust tries to acquire Channel.lock, but it
# blocks because the lock is already held by the main thread.
#
# The result is that the Transport thread never processes the remote
# host's MSG_KEXINIT packet, because it becomes deadlocked while
# handling the preceding MSG_CHANNEL_WINDOW_ADJUST message.
# We set up two separate threads for sending and receiving packets,
# while the main thread acts as a watchdog timer. If the timer
# expires, a deadlock is assumed.
class SendThread(threading.Thread):
def __init__(self, chan, iterations, done_event):
threading.Thread.__init__(
self, None, None, self.__class__.__name__
)
self.daemon = True
self.chan = chan
self.iterations = iterations
self.done_event = done_event
self.watchdog_event = threading.Event()
self.last = None
def run(self):
try:
for i in range(1, 1 + self.iterations):
if self.done_event.is_set():
break
self.watchdog_event.set()
# print i, "SEND"
self.chan.send("x" * 2048)
finally:
self.done_event.set()
self.watchdog_event.set()
class ReceiveThread(threading.Thread):
def __init__(self, chan, done_event):
threading.Thread.__init__(
self, None, None, self.__class__.__name__
)
self.daemon = True
self.chan = chan
self.done_event = done_event
self.watchdog_event = threading.Event()
def run(self):
try:
while not self.done_event.is_set():
if self.chan.recv_ready():
chan.recv(65536)
self.watchdog_event.set()
else:
if random.randint(0, 1):
time.sleep(random.randint(0, 500) / 1000.0)
finally:
self.done_event.set()
self.watchdog_event.set()
self.setup_test_server()
self.ts.packetizer.REKEY_BYTES = 2048
chan = self.tc.open_session()
chan.exec_command("yes")
schan = self.ts.accept(1.0)
# Monkey patch the client's Transport._handler_table so that the client
# sends MSG_CHANNEL_WINDOW_ADJUST whenever it receives an initial
# MSG_KEXINIT. This is used to simulate the effect of network latency
# on a real MSG_CHANNEL_WINDOW_ADJUST message.
self.tc._handler_table = (
self.tc._handler_table.copy()
) # copy per-class dictionary
_negotiate_keys = self.tc._handler_table[MSG_KEXINIT]
def _negotiate_keys_wrapper(self, m):
if self.local_kex_init is None: # Remote side sent KEXINIT
# Simulate in-transit MSG_CHANNEL_WINDOW_ADJUST by sending it
# before responding to the incoming MSG_KEXINIT.
m2 = Message()
m2.add_byte(cMSG_CHANNEL_WINDOW_ADJUST)
m2.add_int(chan.remote_chanid)
m2.add_int(1) # bytes to add
self._send_message(m2)
return _negotiate_keys(self, m)
self.tc._handler_table[MSG_KEXINIT] = _negotiate_keys_wrapper
# Parameters for the test
        iterations = 500  # The deadlock does not happen every time, but it
        # should happen after many iterations.
timeout = 5
# This event is set when the test is completed
done_event = threading.Event()
# Start the sending thread
st = SendThread(schan, iterations, done_event)
st.start()
# Start the receiving thread
rt = ReceiveThread(chan, done_event)
rt.start()
# Act as a watchdog timer, checking
deadlocked = False
while not deadlocked and not done_event.is_set():
for event in (st.watchdog_event, rt.watchdog_event):
event.wait(timeout)
if done_event.is_set():
break
if not event.is_set():
deadlocked = True
break
event.clear()
# Tell the threads to stop (if they haven't already stopped). Note
# that if one or more threads are deadlocked, they might hang around
# forever (until the process exits).
done_event.set()
# Assertion: We must not have detected a timeout.
self.assertFalse(deadlocked)
# Close the channels
schan.close()
chan.close()
    def test_sanitize_packet_size(self):
        """
        verify that we conform to the RFC limits on packet sizes.
        """
for val, correct in [
(4095, MIN_PACKET_SIZE),
(None, DEFAULT_MAX_PACKET_SIZE),
(2**32, MAX_WINDOW_SIZE),
]:
self.assertEqual(self.tc._sanitize_packet_size(val), correct)
    def test_sanitize_window_size(self):
        """
        verify that we conform to the RFC limits on window sizes.
        """
for val, correct in [
(32767, MIN_WINDOW_SIZE),
(None, DEFAULT_WINDOW_SIZE),
(2**32, MAX_WINDOW_SIZE),
]:
self.assertEqual(self.tc._sanitize_window_size(val), correct)
@slow
def test_handshake_timeout(self):
"""
verify that we can get a handshake timeout.
"""
# Tweak client Transport instance's Packetizer instance so
# its read_message() sleeps a bit. This helps prevent race conditions
# where the client Transport's timeout timer thread doesn't even have
# time to get scheduled before the main client thread finishes
# handshaking with the server.
# (Doing this on the server's transport *sounds* more 'correct' but
# actually doesn't work nearly as well for whatever reason.)
class SlowPacketizer(Packetizer):
def read_message(self):
time.sleep(1)
return super().read_message()
# NOTE: prettttty sure since the replaced .packetizer Packetizer is now
# no longer doing anything with its copy of the socket...everything'll
# be fine. Even tho it's a bit squicky.
self.tc.packetizer = SlowPacketizer(self.tc.sock)
# Continue with regular test red tape.
host_key = RSAKey.from_private_key_file(_support("rsa.key"))
public_host_key = RSAKey(data=host_key.asbytes())
self.ts.add_server_key(host_key)
event = threading.Event()
server = NullServer()
self.assertTrue(not event.is_set())
self.tc.handshake_timeout = 0.000000000001
self.ts.start_server(event, server)
self.assertRaises(
EOFError,
self.tc.connect,
hostkey=public_host_key,
username="slowdive",
password="pygmalion",
)
def test_select_after_close(self):
"""
verify that select works when a channel is already closed.
"""
self.setup_test_server()
chan = self.tc.open_session()
chan.invoke_shell()
schan = self.ts.accept(1.0)
schan.close()
# give client a moment to receive close notification
time.sleep(0.1)
r, w, e = select.select([chan], [], [], 0.1)
self.assertEqual([chan], r)
self.assertEqual([], w)
self.assertEqual([], e)
def test_channel_send_misc(self):
"""
verify behaviours sending various instances to a channel
"""
self.setup_test_server()
text = "\xa7 slice me nicely"
with self.tc.open_session() as chan:
schan = self.ts.accept(1.0)
if schan is None:
self.fail("Test server transport failed to accept")
sfile = schan.makefile()
# TypeError raised on non string or buffer type
self.assertRaises(TypeError, chan.send, object())
self.assertRaises(TypeError, chan.sendall, object())
# sendall() accepts a unicode instance
chan.sendall(text)
expected = text.encode("utf-8")
self.assertEqual(sfile.read(len(expected)), expected)
@needs_builtin("buffer")
def test_channel_send_buffer(self):
"""
verify sending buffer instances to a channel
"""
self.setup_test_server()
data = 3 * b"some test data\n whole"
with self.tc.open_session() as chan:
schan = self.ts.accept(1.0)
if schan is None:
self.fail("Test server transport failed to accept")
sfile = schan.makefile()
# send() accepts buffer instances
sent = 0
while sent < len(data):
sent += chan.send(buffer(data, sent, 8)) # noqa
self.assertEqual(sfile.read(len(data)), data)
# sendall() accepts a buffer instance
chan.sendall(buffer(data)) # noqa
self.assertEqual(sfile.read(len(data)), data)
@needs_builtin("memoryview")
def test_channel_send_memoryview(self):
"""
verify sending memoryview instances to a channel
"""
self.setup_test_server()
data = 3 * b"some test data\n whole"
with self.tc.open_session() as chan:
schan = self.ts.accept(1.0)
if schan is None:
self.fail("Test server transport failed to accept")
sfile = schan.makefile()
# send() accepts memoryview slices
sent = 0
view = memoryview(data)
while sent < len(view):
sent += chan.send(view[sent : sent + 8])
self.assertEqual(sfile.read(len(data)), data)
# sendall() accepts a memoryview instance
chan.sendall(memoryview(data))
self.assertEqual(sfile.read(len(data)), data)
def test_server_rejects_open_channel_without_auth(self):
try:
self.setup_test_server(connect_kwargs={})
self.tc.open_session()
except ChannelException as e:
assert e.code == OPEN_FAILED_ADMINISTRATIVELY_PROHIBITED
else:
assert False, "Did not raise ChannelException!"
def test_server_rejects_arbitrary_global_request_without_auth(self):
self.setup_test_server(connect_kwargs={})
# NOTE: this dummy global request kind would normally pass muster
# from the test server.
self.tc.global_request("acceptable")
# Global requests never raise exceptions, even on failure (not sure why
# this was the original design...ugh.) Best we can do to tell failure
# happened is that the client transport's global_response was set back
# to None; if it had succeeded, it would be the response Message.
err = "Unauthed global response incorrectly succeeded!"
assert self.tc.global_response is None, err
def test_server_rejects_port_forward_without_auth(self):
# NOTE: at protocol level port forward requests are treated same as a
# regular global request, but Paramiko server implements a special-case
# method for it, so it gets its own test. (plus, THAT actually raises
# an exception on the client side, unlike the general case...)
self.setup_test_server(connect_kwargs={})
try:
self.tc.request_port_forward("localhost", 1234)
except SSHException as e:
assert "forwarding request denied" in str(e)
else:
assert False, "Did not raise SSHException!"
def _send_unimplemented(self, server_is_sender):
self.setup_test_server()
sender, recipient = self.tc, self.ts
if server_is_sender:
sender, recipient = self.ts, self.tc
recipient._send_message = Mock()
msg = Message()
msg.add_byte(cMSG_UNIMPLEMENTED)
sender._send_message(msg)
# TODO: I hate this but I literally don't see a good way to know when
# the recipient has received the sender's message (there are no
# existing threading events in play that work for this), esp in this
# case where we don't WANT a response (as otherwise we could
# potentially try blocking on the sender's receipt of a reply...maybe).
time.sleep(0.1)
assert not recipient._send_message.called
def test_server_does_not_respond_to_MSG_UNIMPLEMENTED(self):
self._send_unimplemented(server_is_sender=False)
def test_client_does_not_respond_to_MSG_UNIMPLEMENTED(self):
self._send_unimplemented(server_is_sender=True)
def _send_client_message(self, message_type):
self.setup_test_server(connect_kwargs={})
self.ts._send_message = Mock()
# NOTE: this isn't 100% realistic (most of these message types would
# have actual other fields in 'em) but it suffices to test the level of
# message dispatch we're interested in here.
msg = Message()
# TODO: really not liking the whole cMSG_XXX vs MSG_XXX duality right
# now, esp since the former is almost always just byte_chr(the
# latter)...but since that's the case...
msg.add_byte(byte_chr(message_type))
self.tc._send_message(msg)
# No good way to actually wait for server action (see above tests re:
# MSG_UNIMPLEMENTED). Grump.
time.sleep(0.1)
def _expect_unimplemented(self):
# Ensure MSG_UNIMPLEMENTED was sent (implies it hit end of loop instead
# of truly handling the given message).
# NOTE: When bug present, this will actually be the first thing that
# fails (since in many cases actual message handling doesn't involve
# sending a message back right away).
assert self.ts._send_message.call_count == 1
reply = self.ts._send_message.call_args[0][0]
reply.rewind() # Because it's pre-send, not post-receive
assert reply.get_byte() == cMSG_UNIMPLEMENTED
def test_server_transports_reject_client_message_types(self):
# TODO: handle Transport's own tables too, not just its inner auth
# handler's table. See TODOs in auth_handler.py
some_handler = self._auth_handler_class(self.tc)
for message_type in some_handler._client_handler_table:
self._send_client_message(message_type)
self._expect_unimplemented()
# Reset for rest of loop
self.tearDown()
self.setUp()
def test_server_rejects_client_MSG_USERAUTH_SUCCESS(self):
self._send_client_message(MSG_USERAUTH_SUCCESS)
# Sanity checks
assert not self.ts.authenticated
assert not self.ts.auth_handler.authenticated
# Real fix's behavior
self._expect_unimplemented()
def test_can_override_packetizer_used(self):
class MyPacketizer(Packetizer):
pass
# control case
assert Transport(sock=LoopSocket()).packetizer.__class__ is Packetizer
# overridden case
tweaked = Transport(sock=LoopSocket(), packetizer_class=MyPacketizer)
assert tweaked.packetizer.__class__ is MyPacketizer
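# Hedged illustrative sketch (not part of this suite): as the unauthed
# global-request test above notes, Transport.global_request signals denial
# by returning None rather than raising. A caller preferring exceptions can
# wrap it like this; the helper name is invented for illustration.
def _checked_global_request(transport, kind, data=None):
    from paramiko.ssh_exception import SSHException

    # With wait=True (the default), this blocks for the reply and returns
    # either the response Message or None on failure/denial.
    reply = transport.global_request(kind, data, wait=True)
    if reply is None:
        raise SSHException("global request {!r} was denied".format(kind))
    return reply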
# TODO: for now this is purely a regression test. It needs actual tests of the
# intentional new behavior too!
class ServiceRequestingTransportTest(TransportTest):
_auth_handler_class = AuthOnlyHandler
def setUp(self):
# Copypasta (Transport init is load-bearing)
self.socks = LoopSocket()
self.sockc = LoopSocket()
self.sockc.link(self.socks)
# New class who dis
self.tc = ServiceRequestingTransport(self.sockc)
self.ts = ServiceRequestingTransport(self.socks)
class AlgorithmDisablingTests(unittest.TestCase):
def test_preferred_lists_default_to_private_attribute_contents(self):
t = Transport(sock=Mock())
assert t.preferred_ciphers == t._preferred_ciphers
assert t.preferred_macs == t._preferred_macs
assert t.preferred_keys == tuple(
t._preferred_keys
+ tuple(
"{}[email protected]".format(x) for x in t._preferred_keys
)
)
assert t.preferred_kex == t._preferred_kex
def test_preferred_lists_filter_disabled_algorithms(self):
t = Transport(
sock=Mock(),
disabled_algorithms={
"ciphers": ["aes128-cbc"],
"macs": ["hmac-md5"],
"keys": ["ssh-dss"],
"kex": ["diffie-hellman-group14-sha256"],
},
)
assert "aes128-cbc" in t._preferred_ciphers
assert "aes128-cbc" not in t.preferred_ciphers
assert "hmac-md5" in t._preferred_macs
assert "hmac-md5" not in t.preferred_macs
assert "ssh-dss" in t._preferred_keys
assert "ssh-dss" not in t.preferred_keys
assert "[email protected]" not in t.preferred_keys
assert "diffie-hellman-group14-sha256" in t._preferred_kex
assert "diffie-hellman-group14-sha256" not in t.preferred_kex
def test_implementation_refers_to_public_algo_lists(self):
t = Transport(
sock=Mock(),
disabled_algorithms={
"ciphers": ["aes128-cbc"],
"macs": ["hmac-md5"],
"keys": ["ssh-dss"],
"kex": ["diffie-hellman-group14-sha256"],
"compression": ["zlib"],
},
)
# Enable compression cuz otherwise disabling one option for it makes no
# sense...
t.use_compression(True)
# Effectively a random spot check, but kex init touches most/all of the
# algorithm lists so it's a good spot.
t._send_message = Mock()
t._send_kex_init()
# Cribbed from Transport._parse_kex_init, which didn't feel worth
# refactoring given all the vars involved :(
m = t._send_message.call_args[0][0]
m.rewind()
m.get_byte() # the msg type
m.get_bytes(16) # cookie, discarded
kexen = m.get_list()
server_keys = m.get_list()
ciphers = m.get_list()
m.get_list()
macs = m.get_list()
m.get_list()
compressions = m.get_list()
# OK, now we can actually check that our disabled algos were not
# included (as this message includes the full lists)
assert "aes128-cbc" not in ciphers
assert "hmac-md5" not in macs
assert "ssh-dss" not in server_keys
assert "diffie-hellman-group14-sha256" not in kexen
assert "zlib" not in compressions
class TestSHA2SignatureKeyExchange(unittest.TestCase):
# NOTE: these all rely on the default server() hostkey being RSA
# NOTE: these rely on both sides being properly implemented re: agreed-upon
# hostkey during kex being what's actually used. Truly proving that eg
# SHA512 was used, is quite difficult w/o super gross hacks. However, there
# are new tests in test_pkey.py which use known signature blobs to prove
# the SHA2 family was in fact used!
@requires_sha1_signing
def test_base_case_ssh_rsa_still_used_as_fallback(self):
# Prove that ssh-rsa is used if either, or both, participants have SHA2
# algorithms disabled
for which in ("init", "client_init", "server_init"):
with server(**{which: _disable_sha2}) as (tc, _):
assert tc.host_key_type == "ssh-rsa"
def test_kex_with_sha2_512(self):
# It's the default!
with server() as (tc, _):
assert tc.host_key_type == "rsa-sha2-512"
def test_kex_with_sha2_256(self):
# No 512 -> you get 256
with server(
init=dict(disabled_algorithms=dict(keys=["rsa-sha2-512"]))
) as (tc, _):
assert tc.host_key_type == "rsa-sha2-256"
def _incompatible_peers(self, client_init, server_init):
with server(
client_init=client_init, server_init=server_init, catch_error=True
) as (tc, ts, err):
# If neither side blew up then that's bad!
assert err is not None
# If client side blew up first, it'll be straightforward
if isinstance(err, IncompatiblePeer):
pass
# If server side blew up first, client sees EOF & we need to check
# the server transport for its saved error (otherwise it can only
# appear in log output)
elif isinstance(err, EOFError):
assert ts.saved_exception is not None
assert isinstance(ts.saved_exception, IncompatiblePeer)
# If it was something else, welp
else:
raise err
def test_client_sha2_disabled_server_sha1_disabled_no_match(self):
self._incompatible_peers(
client_init=_disable_sha2, server_init=_disable_sha1
)
def test_client_sha1_disabled_server_sha2_disabled_no_match(self):
self._incompatible_peers(
client_init=_disable_sha1, server_init=_disable_sha2
)
def test_explicit_client_hostkey_not_limited(self):
# Be very explicit about the hostkey on BOTH ends,
# and ensure it still ends up choosing sha2-512.
# (This is a regression test vs previous implementation which overwrote
# the entire preferred-hostkeys structure when given an explicit key as
# a client.)
hostkey = RSAKey.from_private_key_file(_support("rsa.key"))
connect = dict(
hostkey=hostkey, username="slowdive", password="pygmalion"
)
with server(hostkey=hostkey, connect=connect) as (tc, _):
assert tc.host_key_type == "rsa-sha2-512"
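# Hedged sketch: the host_key_type attribute asserted above is also how
# calling code can observe which host key algorithm kex settled on; the
# argument is assumed to be a connected paramiko.Transport.
def _negotiated_hostkey_algorithm(transport):
    # Set during key exchange: e.g. "rsa-sha2-512" against modern peers,
    # falling back to "ssh-rsa" when SHA-2 variants are disabled.
    return transport.host_key_type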
class TestExtInfo(unittest.TestCase):
def test_ext_info_handshake_exposed_in_client_kexinit(self):
with server() as (tc, _):
# NOTE: this is latest KEXINIT /sent by us/ (Transport retains it)
kex = tc._get_latest_kex_init()
# flag in KexAlgorithms list
assert "ext-info-c" in kex["kex_algo_list"]
# data stored on Transport after hearing back from a compatible
# server (such as ourselves in server mode)
assert tc.server_extensions == {
"server-sig-algs": b"ssh-ed25519,ecdsa-sha2-nistp256,ecdsa-sha2-nistp384,ecdsa-sha2-nistp521,rsa-sha2-512,rsa-sha2-256,ssh-rsa,ssh-dss" # noqa
}
def test_client_uses_server_sig_algs_for_pubkey_auth(self):
privkey = RSAKey.from_private_key_file(_support("rsa.key"))
with server(
pubkeys=[privkey],
connect=dict(pkey=privkey),
server_init=dict(
disabled_algorithms=dict(pubkeys=["rsa-sha2-512"])
),
) as (tc, _):
assert tc.is_authenticated()
# Client settled on 256 despite itself not having 512 disabled (and
# otherwise, 512 would have been earlier in the preferred list)
assert tc._agreed_pubkey_algorithm == "rsa-sha2-256"
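# Hedged sketch: per the assertions above, EXT_INFO data from the server
# lands in Transport.server_extensions after kex. This helper (name invented
# here) decodes the server-sig-algs list that pubkey auth consults; it
# assumes kex completed against an EXT_INFO-capable server.
def _server_sig_algs(transport):
    raw = transport.server_extensions.get("server-sig-algs", b"")
    return raw.decode("ascii").split(",") if raw else []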
class BadSeqPacketizer(Packetizer):
def read_message(self):
cmd, msg = super().read_message()
# Only mess w/ seqno if kexinit.
if cmd is MSG_KEXINIT:
# NOTE: this is /only/ the copy of the seqno which gets
# transmitted up from Packetizer; it's not modifying
# Packetizer's own internal seqno. For these tests,
# modifying the latter isn't required, and is also harder
# to do w/o triggering MAC mismatches.
msg.seqno = 17 # arbitrary nonzero int
return cmd, msg
class TestStrictKex:
def test_kex_algos_includes_kex_strict_c(self):
with server() as (tc, _):
kex = tc._get_latest_kex_init()
assert "[email protected]" in kex["kex_algo_list"]
@mark.parametrize(
"server_active,client_active",
itertools.product([True, False], repeat=2),
)
def test_mode_agreement(self, server_active, client_active):
with server(
server_init=dict(strict_kex=server_active),
client_init=dict(strict_kex=client_active),
) as (tc, ts):
if server_active and client_active:
assert tc.agreed_on_strict_kex is True
assert ts.agreed_on_strict_kex is True
else:
assert tc.agreed_on_strict_kex is False
assert ts.agreed_on_strict_kex is False
def test_mode_advertised_by_default(self):
# NOTE: no explicit strict_kex overrides...
with server() as (tc, ts):
assert all(
(
tc.advertise_strict_kex,
tc.agreed_on_strict_kex,
ts.advertise_strict_kex,
ts.agreed_on_strict_kex,
)
)
@mark.parametrize(
"ptype",
(
# "normal" but definitely out-of-order message
MSG_CHANNEL_OPEN,
# Normally ignored, but not in this case
MSG_IGNORE,
# Normally triggers debug parsing, but not in this case
MSG_DEBUG,
# Normally ignored, but...you get the idea
MSG_UNIMPLEMENTED,
# Not real, so would normally trigger us /sending/
# MSG_UNIMPLEMENTED, but...
MSG_FUGGEDABOUTIT,
),
)
def test_MessageOrderError_non_kex_messages_in_initial_kex(self, ptype):
class AttackTransport(Transport):
# Easiest apparent spot on server side which is:
# - late enough for both ends to have handshook on strict mode
# - early enough to be in the window of opportunity for Terrapin
# attack; essentially during actual kex, when the engine is
# waiting for things like MSG_KEXECDH_REPLY (for eg curve25519).
def _negotiate_keys(self, m):
self.clear_to_send_lock.acquire()
try:
self.clear_to_send.clear()
finally:
self.clear_to_send_lock.release()
if self.local_kex_init is None:
# remote side wants to renegotiate
self._send_kex_init()
self._parse_kex_init(m)
# Here, we would normally kick over to kex_engine, but instead
# we want the server to send the OOO message.
m = Message()
m.add_byte(byte_chr(ptype))
# rest of packet unnecessary...
self._send_message(m)
with raises(MessageOrderError):
with server(server_transport_factory=AttackTransport) as (tc, _):
pass # above should run and except during connect()
def test_SSHException_raised_on_out_of_order_messages_when_not_strict(
self,
):
# This is kind of dumb (either situation is still fatal!) but whatever,
# may as well be strict with our new strict flag...
with raises(SSHException) as info: # would be true either way, but
with server(
client_init=dict(strict_kex=False),
) as (tc, _):
tc._expect_packet(MSG_KEXINIT)
tc.open_session()
assert info.type is SSHException # NOT MessageOrderError!
def test_error_not_raised_when_kexinit_not_seq_0_but_unstrict(self):
with server(
client_init=dict(
# Disable strict kex
strict_kex=False,
# Give our clientside a packetizer that sets all kexinit
# Message objects to have .seqno==17, which would trigger the
# new logic if we'd forgotten to wrap it in strict-kex check
packetizer_class=BadSeqPacketizer,
),
):
pass # kexinit happens at connect...
def test_MessageOrderError_raised_when_kexinit_not_seq_0_and_strict(self):
with raises(MessageOrderError):
with server(
# Give our clientside a packetizer that sets all kexinit
# Message objects to have .seqno==17, which should trigger the
# new logic (given we are NOT disabling strict-mode)
client_init=dict(packetizer_class=BadSeqPacketizer),
):
pass # kexinit happens at connect...
def test_sequence_numbers_reset_on_newkeys_when_strict(self):
with server(defer=True) as (tc, ts):
# When in strict mode, these should all be zero or close to it
# (post-kexinit, pre-auth).
# Server->client will be 1 (EXT_INFO got sent after NEWKEYS)
assert tc.packetizer._Packetizer__sequence_number_in == 1
assert ts.packetizer._Packetizer__sequence_number_out == 1
# Client->server will be 0
assert tc.packetizer._Packetizer__sequence_number_out == 0
assert ts.packetizer._Packetizer__sequence_number_in == 0
def test_sequence_numbers_not_reset_on_newkeys_when_not_strict(self):
with server(defer=True, client_init=dict(strict_kex=False)) as (
tc,
ts,
):
# When not in strict mode, these will all be ~3-4 or so
# (post-kexinit, pre-auth). Not encoding exact values as it will
# change anytime we mess with the test harness...
assert tc.packetizer._Packetizer__sequence_number_in != 0
assert tc.packetizer._Packetizer__sequence_number_out != 0
assert ts.packetizer._Packetizer__sequence_number_in != 0
assert ts.packetizer._Packetizer__sequence_number_out != 0
def test_sequence_number_rollover_detected(self):
class RolloverTransport(Transport):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
# Induce an about-to-rollover seqno, such that it rolls over
# during initial kex. (Sequence numbers are uint32, so we need
# the largest possible 32bit integer such that incrementing it
# will roll over to 0.)
last_seq = 2**32 - 1
setattr(
self.packetizer,
"_Packetizer__sequence_number_in",
last_seq,
)
setattr(
self.packetizer,
"_Packetizer__sequence_number_out",
last_seq,
)
with raises(
SSHException,
match=r"Sequence number rolled over during initial kex!",
):
with server(
client_init=dict(
# Disable strict kex - this should happen always
strict_kex=False,
),
# Transport which tickles its packetizer seqno's
transport_factory=RolloverTransport,
):
pass # kexinit happens at connect...
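# Hedged sketch: the strict-kex tests above poke at name-mangled Packetizer
# internals; a tiny helper keeps that fragile access in one place. These are
# private attributes and may change between Paramiko releases.
def _packetizer_seqnos(transport):
    p = transport.packetizer
    return (
        getattr(p, "_Packetizer__sequence_number_in"),
        getattr(p, "_Packetizer__sequence_number_out"),
    )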
# ---- file: paramiko_paramiko/tests/test_ssh_exception.py (paramiko/paramiko, LGPL-2.1) ----
import pickle
import unittest
from paramiko import RSAKey
from paramiko.ssh_exception import (
NoValidConnectionsError,
BadAuthenticationType,
PartialAuthentication,
ChannelException,
BadHostKeyException,
ProxyCommandFailure,
)
class NoValidConnectionsErrorTest(unittest.TestCase):
def test_pickling(self):
# Regression test for https://github.com/paramiko/paramiko/issues/617
exc = NoValidConnectionsError({("127.0.0.1", "22"): Exception()})
new_exc = pickle.loads(pickle.dumps(exc))
self.assertEqual(type(exc), type(new_exc))
self.assertEqual(str(exc), str(new_exc))
self.assertEqual(exc.args, new_exc.args)
def test_error_message_for_single_host(self):
exc = NoValidConnectionsError({("127.0.0.1", "22"): Exception()})
assert "Unable to connect to port 22 on 127.0.0.1" in str(exc)
def test_error_message_for_two_hosts(self):
exc = NoValidConnectionsError(
{("127.0.0.1", "22"): Exception(), ("::1", "22"): Exception()}
)
assert "Unable to connect to port 22 on 127.0.0.1 or ::1" in str(exc)
def test_error_message_for_multiple_hosts(self):
exc = NoValidConnectionsError(
{
("127.0.0.1", "22"): Exception(),
("::1", "22"): Exception(),
("10.0.0.42", "22"): Exception(),
}
)
exp = "Unable to connect to port 22 on 10.0.0.42, 127.0.0.1 or ::1"
assert exp in str(exc)
class ExceptionStringDisplayTest(unittest.TestCase):
def test_BadAuthenticationType(self):
exc = BadAuthenticationType(
"Bad authentication type", ["ok", "also-ok"]
)
expected = "Bad authentication type; allowed types: ['ok', 'also-ok']"
assert str(exc) == expected
def test_PartialAuthentication(self):
exc = PartialAuthentication(["ok", "also-ok"])
expected = "Partial authentication; allowed types: ['ok', 'also-ok']"
assert str(exc) == expected
def test_BadHostKeyException(self):
got_key = RSAKey.generate(2048)
wanted_key = RSAKey.generate(2048)
exc = BadHostKeyException("myhost", got_key, wanted_key)
expected = "Host key for server 'myhost' does not match: got '{}', expected '{}'" # noqa
assert str(exc) == expected.format(
got_key.get_base64(), wanted_key.get_base64()
)
def test_ProxyCommandFailure(self):
exc = ProxyCommandFailure("man squid", 7)
expected = 'ProxyCommand("man squid") returned nonzero exit status: 7'
assert str(exc) == expected
def test_ChannelException(self):
exc = ChannelException(17, "whatever")
assert str(exc) == "ChannelException(17, 'whatever')"
# ---- file: paramiko_paramiko/tests/test_channelfile.py (paramiko/paramiko, LGPL-2.1) ----
from unittest.mock import patch, MagicMock
from paramiko import Channel, ChannelFile, ChannelStderrFile, ChannelStdinFile
class ChannelFileBase:
@patch("paramiko.channel.ChannelFile._set_mode")
def test_defaults_to_unbuffered_reading(self, setmode):
self.klass(Channel(None))
setmode.assert_called_once_with("r", -1)
@patch("paramiko.channel.ChannelFile._set_mode")
def test_can_override_mode_and_bufsize(self, setmode):
self.klass(Channel(None), mode="w", bufsize=25)
setmode.assert_called_once_with("w", 25)
def test_read_recvs_from_channel(self):
chan = MagicMock()
cf = self.klass(chan)
cf.read(100)
chan.recv.assert_called_once_with(100)
def test_write_calls_channel_sendall(self):
chan = MagicMock()
cf = self.klass(chan, mode="w")
cf.write("ohai")
chan.sendall.assert_called_once_with(b"ohai")
class TestChannelFile(ChannelFileBase):
klass = ChannelFile
class TestChannelStderrFile:
def test_read_calls_channel_recv_stderr(self):
chan = MagicMock()
cf = ChannelStderrFile(chan)
cf.read(100)
chan.recv_stderr.assert_called_once_with(100)
def test_write_calls_channel_sendall(self):
chan = MagicMock()
cf = ChannelStderrFile(chan, mode="w")
cf.write("ohai")
chan.sendall_stderr.assert_called_once_with(b"ohai")
class TestChannelStdinFile(ChannelFileBase):
klass = ChannelStdinFile
def test_close_calls_channel_shutdown_write(self):
chan = MagicMock()
cf = ChannelStdinFile(chan, mode="wb")
cf.flush = MagicMock()
cf.close()
# Sanity check that we still call BufferedFile.close()
cf.flush.assert_called_once_with()
assert cf._closed is True
# Actual point of test
chan.shutdown_write.assert_called_once_with()
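# Hedged usage sketch: outside these tests, the file wrappers come from an
# open paramiko.Channel via its makefile helpers rather than from direct
# construction; 'chan' is assumed to be an already-open channel.
def _wrap_channel(chan):
    stdin = chan.makefile_stdin("wb")  # ChannelStdinFile
    stdout = chan.makefile("rb")  # ChannelFile
    stderr = chan.makefile_stderr("rb")  # ChannelStderrFile
    return stdin, stdout, stderr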
# ---- file: paramiko_paramiko/tests/test_client.py (paramiko/paramiko, LGPL-2.1) ----
# Copyright (C) 2003-2009 Robey Pointer <[email protected]>
#
# This file is part of paramiko.
#
# Paramiko is free software; you can redistribute it and/or modify it under the
# terms of the GNU Lesser General Public License as published by the Free
# Software Foundation; either version 2.1 of the License, or (at your option)
# any later version.
#
# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Paramiko; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
"""
Some unit tests for SSHClient.
"""
import gc
import os
import platform
import socket
import threading
import time
import unittest
import warnings
import weakref
from tempfile import mkstemp
import pytest
from pytest_relaxed import raises
from unittest.mock import patch, Mock
import paramiko
from paramiko import SSHClient
from paramiko.pkey import PublicBlob
from paramiko.ssh_exception import SSHException, AuthenticationException
from ._util import _support, requires_sha1_signing, slow
requires_gss_auth = unittest.skipUnless(
paramiko.GSS_AUTH_AVAILABLE, "GSS auth not available"
)
FINGERPRINTS = {
"ssh-dss": b"\x44\x78\xf0\xb9\xa2\x3c\xc5\x18\x20\x09\xff\x75\x5b\xc1\xd2\x6c", # noqa
"ssh-rsa": b"\x60\x73\x38\x44\xcb\x51\x86\x65\x7f\xde\xda\xa2\x2b\x5a\x57\xd5", # noqa
"ecdsa-sha2-nistp256": b"\x25\x19\xeb\x55\xe6\xa1\x47\xff\x4f\x38\xd2\x75\x6f\xa5\xd5\x60", # noqa
"ssh-ed25519": b'\xb3\xd5"\xaa\xf9u^\xe8\xcd\x0e\xea\x02\xb9)\xa2\x80',
}
class NullServer(paramiko.ServerInterface):
def __init__(self, *args, **kwargs):
# Allow tests to enable/disable specific key types
self.__allowed_keys = kwargs.pop("allowed_keys", [])
# And allow them to set a (single...meh) expected public blob (cert)
self.__expected_public_blob = kwargs.pop("public_blob", None)
super().__init__(*args, **kwargs)
def get_allowed_auths(self, username):
if username == "slowdive":
return "publickey,password"
return "publickey"
def check_auth_password(self, username, password):
if (username == "slowdive") and (password == "pygmalion"):
return paramiko.AUTH_SUCCESSFUL
if (username == "slowdive") and (password == "unresponsive-server"):
time.sleep(5)
return paramiko.AUTH_SUCCESSFUL
return paramiko.AUTH_FAILED
def check_auth_publickey(self, username, key):
try:
expected = FINGERPRINTS[key.get_name()]
except KeyError:
return paramiko.AUTH_FAILED
# Base check: allowed auth type & fingerprint matches
happy = (
key.get_name() in self.__allowed_keys
and key.get_fingerprint() == expected
)
# Secondary check: if test wants assertions about cert data
if (
self.__expected_public_blob is not None
and key.public_blob != self.__expected_public_blob
):
happy = False
return paramiko.AUTH_SUCCESSFUL if happy else paramiko.AUTH_FAILED
def check_channel_request(self, kind, chanid):
return paramiko.OPEN_SUCCEEDED
def check_channel_exec_request(self, channel, command):
if command != b"yes":
return False
return True
def check_channel_env_request(self, channel, name, value):
if name == "INVALID_ENV":
return False
if not hasattr(channel, "env"):
setattr(channel, "env", {})
channel.env[name] = value
return True
class ClientTest(unittest.TestCase):
def setUp(self):
self.sockl = socket.socket()
self.sockl.bind(("localhost", 0))
self.sockl.listen(1)
self.addr, self.port = self.sockl.getsockname()
self.connect_kwargs = dict(
hostname=self.addr,
port=self.port,
username="slowdive",
look_for_keys=False,
)
self.event = threading.Event()
self.kill_event = threading.Event()
def tearDown(self):
# Shut down client Transport
if hasattr(self, "tc"):
self.tc.close()
# Shut down shared socket
if hasattr(self, "sockl"):
# Signal to server thread that it should shut down early; it checks
# this immediately after accept(). (In scenarios where connection
# actually succeeded during the test, this becomes a no-op.)
self.kill_event.set()
# Forcibly connect to server sock in case the server thread is
# hanging out in its accept() (e.g. if the client side of the test
# fails before it even gets to connecting); there's no other good
# way to force an accept() to exit.
put_a_sock_in_it = socket.socket()
put_a_sock_in_it.connect((self.addr, self.port))
put_a_sock_in_it.close()
# Then close "our" end of the socket (which _should_ cause the
# accept() to bail out, but does not, for some reason. I blame
# threading.)
self.sockl.close()
def _run(
self,
allowed_keys=None,
delay=0,
public_blob=None,
kill_event=None,
server_name=None,
):
if allowed_keys is None:
allowed_keys = FINGERPRINTS.keys()
self.socks, addr = self.sockl.accept()
# If the kill event was set at this point, it indicates an early
# shutdown, so bail out now and don't even try setting up a Transport
# (which will just verbosely die.)
if kill_event and kill_event.is_set():
self.socks.close()
return
self.ts = paramiko.Transport(self.socks)
if server_name is not None:
self.ts.local_version = server_name
keypath = _support("rsa.key")
host_key = paramiko.RSAKey.from_private_key_file(keypath)
self.ts.add_server_key(host_key)
keypath = _support("ecdsa-256.key")
host_key = paramiko.ECDSAKey.from_private_key_file(keypath)
self.ts.add_server_key(host_key)
server = NullServer(allowed_keys=allowed_keys, public_blob=public_blob)
if delay:
time.sleep(delay)
self.ts.start_server(self.event, server)
def _test_connection(self, **kwargs):
"""
(Most) kwargs get passed directly into SSHClient.connect().
The exceptions are ``allowed_keys``/``public_blob``/``server_name``
which are stripped and handed to the ``NullServer`` used for testing.
"""
run_kwargs = {"kill_event": self.kill_event}
for key in ("allowed_keys", "public_blob", "server_name"):
run_kwargs[key] = kwargs.pop(key, None)
# Server setup
threading.Thread(target=self._run, kwargs=run_kwargs).start()
host_key = paramiko.RSAKey.from_private_key_file(_support("rsa.key"))
public_host_key = paramiko.RSAKey(data=host_key.asbytes())
# Client setup
self.tc = SSHClient()
self.tc.get_host_keys().add(
f"[{self.addr}]:{self.port}", "ssh-rsa", public_host_key
)
# Actual connection
self.tc.connect(**dict(self.connect_kwargs, **kwargs))
# Authentication successful?
self.event.wait(1.0)
self.assertTrue(self.event.is_set())
self.assertTrue(self.ts.is_active())
self.assertEqual(
self.connect_kwargs["username"], self.ts.get_username()
)
self.assertEqual(True, self.ts.is_authenticated())
self.assertEqual(False, self.tc.get_transport().gss_kex_used)
# Command execution functions?
stdin, stdout, stderr = self.tc.exec_command("yes")
schan = self.ts.accept(1.0)
# Nobody else tests the API of exec_command so let's do it here for
# now. :weary:
assert isinstance(stdin, paramiko.ChannelStdinFile)
assert isinstance(stdout, paramiko.ChannelFile)
assert isinstance(stderr, paramiko.ChannelStderrFile)
schan.send("Hello there.\n")
schan.send_stderr("This is on stderr.\n")
schan.close()
self.assertEqual("Hello there.\n", stdout.readline())
self.assertEqual("", stdout.readline())
self.assertEqual("This is on stderr.\n", stderr.readline())
self.assertEqual("", stderr.readline())
# Cleanup
stdin.close()
stdout.close()
stderr.close()
class SSHClientTest(ClientTest):
@requires_sha1_signing
def test_client(self):
"""
verify that the SSHClient stuff works too.
"""
self._test_connection(password="pygmalion")
@requires_sha1_signing
def test_client_dsa(self):
"""
verify that SSHClient works with a DSA key.
"""
self._test_connection(key_filename=_support("dss.key"))
@requires_sha1_signing
def test_client_rsa(self):
"""
verify that SSHClient works with an RSA key.
"""
self._test_connection(key_filename=_support("rsa.key"))
@requires_sha1_signing
def test_client_ecdsa(self):
"""
verify that SSHClient works with an ECDSA key.
"""
self._test_connection(key_filename=_support("ecdsa-256.key"))
@requires_sha1_signing
def test_client_ed25519(self):
self._test_connection(key_filename=_support("ed25519.key"))
@requires_sha1_signing
def test_multiple_key_files(self):
"""
verify that SSHClient accepts and tries multiple key files.
"""
# This is dumb :(
types_ = {
"rsa": "ssh-rsa",
"dss": "ssh-dss",
"ecdsa": "ecdsa-sha2-nistp256",
}
# Various combos of attempted & valid keys
# TODO: try every possible combo using itertools functions
# TODO: use new key(s) fixture(s)
for attempt, accept in (
(["rsa", "dss"], ["dss"]), # Original test #3
(["dss", "rsa"], ["dss"]), # Ordering matters sometimes, sadly
(["dss", "rsa", "ecdsa-256"], ["dss"]), # Try ECDSA but fail
(["rsa", "ecdsa-256"], ["ecdsa"]), # ECDSA success
):
try:
self._test_connection(
key_filename=[
_support("{}.key".format(x)) for x in attempt
],
allowed_keys=[types_[x] for x in accept],
)
finally:
# Clean up to avoid occasional gc-related deadlocks.
# TODO: use nose test generators after nose port
self.tearDown()
self.setUp()
@requires_sha1_signing
def test_multiple_key_files_failure(self):
"""
Expect failure when multiple keys in play and none are accepted
"""
# Until #387 is fixed we have to catch a high-up exception since
# various platforms trigger different errors here >_<
self.assertRaises(
SSHException,
self._test_connection,
key_filename=[_support("rsa.key")],
allowed_keys=["ecdsa-sha2-nistp256"],
)
@requires_sha1_signing
def test_certs_allowed_as_key_filename_values(self):
# NOTE: giving cert path here, not key path. (Key path test is below.
# They're similar except for which path is given; the expected auth and
# server-side behavior is 100% identical.)
# NOTE: only bothered whipping up one cert per overall class/family.
for type_ in ("rsa", "dss", "ecdsa-256", "ed25519"):
key_path = _support(f"{type_}.key")
self._test_connection(
key_filename=key_path,
public_blob=PublicBlob.from_file(f"{key_path}-cert.pub"),
)
@requires_sha1_signing
def test_certs_implicitly_loaded_alongside_key_filename_keys(self):
# NOTE: a regular test_connection() w/ rsa.key would incidentally
# test this (because test_xxx.key-cert.pub exists) but incidental tests
# stink, so NullServer and friends were updated to allow assertions
# about the server-side key object's public blob. Thus, we can prove
# that a specific cert was found, along with regular authorization
# succeeding proving that the overall flow works.
for type_ in ("rsa", "dss", "ecdsa-256", "ed25519"):
key_path = _support(f"{type_}.key")
self._test_connection(
key_filename=key_path,
public_blob=PublicBlob.from_file(f"{key_path}-cert.pub"),
)
def _cert_algo_test(self, ver, alg):
# Issue #2017; see auth_handler.py
self.connect_kwargs["username"] = "somecertuser" # neuter pw auth
self._test_connection(
# NOTE: SSHClient is able to take either the key or the cert & will
# set up its internals as needed
key_filename=_support("rsa.key-cert.pub"),
server_name="SSH-2.0-OpenSSH_{}".format(ver),
)
assert (
self.tc._transport._agreed_pubkey_algorithm
== "{}[email protected]".format(alg)
)
@requires_sha1_signing
def test_old_openssh_needs_ssh_rsa_for_certs_not_rsa_sha2(self):
self._cert_algo_test(ver="7.7", alg="ssh-rsa")
@requires_sha1_signing
def test_newer_openssh_uses_rsa_sha2_for_certs_not_ssh_rsa(self):
# NOTE: 512 happens to be first in our list and is thus chosen
self._cert_algo_test(ver="7.8", alg="rsa-sha2-512")
def test_default_key_locations_trigger_cert_loads_if_found(self):
# TODO: what it says on the tin: ~/.ssh/id_rsa tries to load
# ~/.ssh/id_rsa-cert.pub. Right now no other tests actually test that
# code path (!) so we're punting too, sob.
pass
def test_auto_add_policy(self):
"""
verify that SSHClient's AutoAddPolicy works.
"""
threading.Thread(target=self._run).start()
hostname = f"[{self.addr}]:{self.port}"
key_file = _support("ecdsa-256.key")
public_host_key = paramiko.ECDSAKey.from_private_key_file(key_file)
self.tc = SSHClient()
self.tc.set_missing_host_key_policy(paramiko.AutoAddPolicy())
self.assertEqual(0, len(self.tc.get_host_keys()))
self.tc.connect(password="pygmalion", **self.connect_kwargs)
self.event.wait(1.0)
self.assertTrue(self.event.is_set())
self.assertTrue(self.ts.is_active())
self.assertEqual("slowdive", self.ts.get_username())
self.assertEqual(True, self.ts.is_authenticated())
self.assertEqual(1, len(self.tc.get_host_keys()))
new_host_key = list(self.tc.get_host_keys()[hostname].values())[0]
self.assertEqual(public_host_key, new_host_key)
def test_save_host_keys(self):
"""
verify that SSHClient correctly saves a known_hosts file.
"""
warnings.filterwarnings("ignore", "tempnam.*")
host_key = paramiko.RSAKey.from_private_key_file(_support("rsa.key"))
public_host_key = paramiko.RSAKey(data=host_key.asbytes())
fd, localname = mkstemp()
os.close(fd)
client = SSHClient()
assert len(client.get_host_keys()) == 0
host_id = f"[{self.addr}]:{self.port}"
client.get_host_keys().add(host_id, "ssh-rsa", public_host_key)
assert len(client.get_host_keys()) == 1
assert public_host_key == client.get_host_keys()[host_id]["ssh-rsa"]
client.save_host_keys(localname)
with open(localname) as fd:
assert host_id in fd.read()
os.unlink(localname)
def test_cleanup(self):
"""
verify that when an SSHClient is collected, its transport (and the
transport's packetizer) is closed.
"""
# Skipped on PyPy because it fails on CI for unknown reasons
if platform.python_implementation() == "PyPy":
return
threading.Thread(target=self._run).start()
self.tc = SSHClient()
self.tc.set_missing_host_key_policy(paramiko.AutoAddPolicy())
assert len(self.tc.get_host_keys()) == 0
self.tc.connect(**dict(self.connect_kwargs, password="pygmalion"))
self.event.wait(1.0)
assert self.event.is_set()
assert self.ts.is_active()
p = weakref.ref(self.tc._transport.packetizer)
assert p() is not None
self.tc.close()
del self.tc
# force a collection to see whether the SSHClient object is deallocated
# 2 GCs are needed on PyPy, time is needed for Python 3
# TODO 4.0: this still fails randomly under CircleCI under Python 3.7,
# 3.8 at the very least. bumped sleep 0.3->1.0s but the underlying
# functionality should get reevaluated now we've dropped Python 2.
time.sleep(1)
gc.collect()
gc.collect()
assert p() is None
@patch("paramiko.client.socket.socket")
@patch("paramiko.client.socket.getaddrinfo")
def test_closes_socket_on_socket_errors(self, getaddrinfo, mocket):
getaddrinfo.return_value = (
("irrelevant", None, None, None, "whatever"),
)
class SocksToBeYou(socket.error):
pass
my_socket = mocket.return_value
my_socket.connect.side_effect = SocksToBeYou
client = SSHClient()
with pytest.raises(SocksToBeYou):
client.connect(hostname="nope")
my_socket.close.assert_called_once_with()
def test_client_can_be_used_as_context_manager(self):
"""
        verify that an SSHClient can be used as a context manager
"""
threading.Thread(target=self._run).start()
with SSHClient() as tc:
self.tc = tc
self.tc.set_missing_host_key_policy(paramiko.AutoAddPolicy())
assert len(self.tc.get_host_keys()) == 0
self.tc.connect(**dict(self.connect_kwargs, password="pygmalion"))
self.event.wait(1.0)
self.assertTrue(self.event.is_set())
self.assertTrue(self.ts.is_active())
self.assertTrue(self.tc._transport is not None)
self.assertTrue(self.tc._transport is None)
def test_banner_timeout(self):
"""
verify that the SSHClient has a configurable banner timeout.
"""
# Start the thread with a 1 second wait.
threading.Thread(target=self._run, kwargs={"delay": 1}).start()
host_key = paramiko.RSAKey.from_private_key_file(_support("rsa.key"))
public_host_key = paramiko.RSAKey(data=host_key.asbytes())
self.tc = SSHClient()
self.tc.get_host_keys().add(
f"[{self.addr}]:{self.port}", "ssh-rsa", public_host_key
)
# Connect with a half second banner timeout.
kwargs = dict(self.connect_kwargs, banner_timeout=0.5)
self.assertRaises(paramiko.SSHException, self.tc.connect, **kwargs)
@requires_sha1_signing
def test_auth_trickledown(self):
"""
Failed key auth doesn't prevent subsequent pw auth from succeeding
"""
# NOTE: re #387, re #394
# If pkey module used within Client._auth isn't correctly handling auth
# errors (e.g. if it allows things like ValueError to bubble up as per
# midway through #394) client.connect() will fail (at key load step)
# instead of succeeding (at password step)
kwargs = dict(
# Password-protected key whose passphrase is not 'pygmalion' (it's
# 'television' as per tests/test_pkey.py). NOTE: must use
# key_filename, loading the actual key here with PKey will except
# immediately; we're testing the try/except crap within Client.
key_filename=[_support("test_rsa_password.key")],
# Actual password for default 'slowdive' user
password="pygmalion",
)
self._test_connection(**kwargs)
@requires_sha1_signing
@slow
def test_auth_timeout(self):
"""
verify that the SSHClient has a configurable auth timeout
"""
# Connect with a half second auth timeout
self.assertRaises(
AuthenticationException,
self._test_connection,
password="unresponsive-server",
auth_timeout=0.5,
)
@patch.object(
paramiko.Channel,
"_set_remote_channel",
lambda *args, **kwargs: time.sleep(100),
)
def test_channel_timeout(self):
"""
verify that the SSHClient has a configurable channel timeout
"""
threading.Thread(target=self._run).start()
# Client setup
self.tc = SSHClient()
self.tc.set_missing_host_key_policy(paramiko.AutoAddPolicy())
# Actual connection
self.tc.connect(
**dict(
self.connect_kwargs, password="pygmalion", channel_timeout=0.5
)
)
self.event.wait(1.0)
self.assertRaises(paramiko.SSHException, self.tc.open_sftp)
@requires_gss_auth
def test_auth_trickledown_gsskex(self):
"""
Failed gssapi-keyex doesn't prevent subsequent key from succeeding
"""
kwargs = dict(gss_kex=True, key_filename=[_support("rsa.key")])
self._test_connection(**kwargs)
@requires_gss_auth
def test_auth_trickledown_gssauth(self):
"""
Failed gssapi-with-mic doesn't prevent subsequent key from succeeding
"""
kwargs = dict(gss_auth=True, key_filename=[_support("rsa.key")])
self._test_connection(**kwargs)
def test_reject_policy(self):
"""
verify that SSHClient's RejectPolicy works.
"""
threading.Thread(target=self._run).start()
self.tc = SSHClient()
self.tc.set_missing_host_key_policy(paramiko.RejectPolicy())
self.assertEqual(0, len(self.tc.get_host_keys()))
self.assertRaises(
paramiko.SSHException,
self.tc.connect,
password="pygmalion",
**self.connect_kwargs,
)
@requires_gss_auth
def test_reject_policy_gsskex(self):
"""
verify that SSHClient's RejectPolicy works,
even if gssapi-keyex was enabled but not used.
"""
# Test for a bug present in paramiko versions released before
# 2017-08-01
threading.Thread(target=self._run).start()
self.tc = SSHClient()
self.tc.set_missing_host_key_policy(paramiko.RejectPolicy())
self.assertEqual(0, len(self.tc.get_host_keys()))
self.assertRaises(
paramiko.SSHException,
self.tc.connect,
password="pygmalion",
gss_kex=True,
**self.connect_kwargs,
)
def _client_host_key_bad(self, host_key):
threading.Thread(target=self._run).start()
hostname = f"[{self.addr}]:{self.port}"
self.tc = SSHClient()
self.tc.set_missing_host_key_policy(paramiko.WarningPolicy())
known_hosts = self.tc.get_host_keys()
known_hosts.add(hostname, host_key.get_name(), host_key)
self.assertRaises(
paramiko.BadHostKeyException,
self.tc.connect,
password="pygmalion",
**self.connect_kwargs,
)
def _client_host_key_good(self, ktype, kfile):
threading.Thread(target=self._run).start()
hostname = f"[{self.addr}]:{self.port}"
self.tc = SSHClient()
self.tc.set_missing_host_key_policy(paramiko.RejectPolicy())
host_key = ktype.from_private_key_file(_support(kfile))
known_hosts = self.tc.get_host_keys()
known_hosts.add(hostname, host_key.get_name(), host_key)
self.tc.connect(password="pygmalion", **self.connect_kwargs)
self.event.wait(1.0)
self.assertTrue(self.event.is_set())
self.assertTrue(self.ts.is_active())
self.assertEqual(True, self.ts.is_authenticated())
def test_host_key_negotiation_1(self):
host_key = paramiko.ECDSAKey.generate()
self._client_host_key_bad(host_key)
@requires_sha1_signing
def test_host_key_negotiation_2(self):
host_key = paramiko.RSAKey.generate(2048)
self._client_host_key_bad(host_key)
def test_host_key_negotiation_3(self):
self._client_host_key_good(paramiko.ECDSAKey, "ecdsa-256.key")
@requires_sha1_signing
def test_host_key_negotiation_4(self):
self._client_host_key_good(paramiko.RSAKey, "rsa.key")
def _setup_for_env(self):
threading.Thread(target=self._run).start()
self.tc = SSHClient()
self.tc.set_missing_host_key_policy(paramiko.AutoAddPolicy())
self.assertEqual(0, len(self.tc.get_host_keys()))
self.tc.connect(
self.addr, self.port, username="slowdive", password="pygmalion"
)
self.event.wait(1.0)
        self.assertTrue(self.event.is_set())
self.assertTrue(self.ts.is_active())
def test_update_environment(self):
"""
Verify that environment variables can be set by the client.
"""
self._setup_for_env()
target_env = {b"A": b"B", b"C": b"d"}
self.tc.exec_command("yes", environment=target_env)
schan = self.ts.accept(1.0)
self.assertEqual(target_env, getattr(schan, "env", {}))
schan.close()
@unittest.skip("Clients normally fail silently, thus so do we, for now")
def test_env_update_failures(self):
self._setup_for_env()
with self.assertRaises(SSHException) as manager:
# Verify that a rejection by the server can be detected
self.tc.exec_command("yes", environment={b"INVALID_ENV": b""})
self.assertTrue(
"INVALID_ENV" in str(manager.exception),
"Expected variable name in error message",
)
self.assertTrue(
isinstance(manager.exception.args[1], SSHException),
"Expected original SSHException in exception",
)
def test_missing_key_policy_accepts_classes_or_instances(self):
"""
Client.missing_host_key_policy() can take classes or instances.
"""
# AN ACTUAL UNIT TEST?! GOOD LORD
# (But then we have to test a private API...meh.)
client = SSHClient()
# Default
assert isinstance(client._policy, paramiko.RejectPolicy)
# Hand in an instance (classic behavior)
client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
assert isinstance(client._policy, paramiko.AutoAddPolicy)
# Hand in just the class (new behavior)
client.set_missing_host_key_policy(paramiko.AutoAddPolicy)
assert isinstance(client._policy, paramiko.AutoAddPolicy)
@patch("paramiko.client.Transport")
def test_disabled_algorithms_defaults_to_None(self, Transport):
SSHClient().connect("host", sock=Mock(), password="no")
assert Transport.call_args[1]["disabled_algorithms"] is None
@patch("paramiko.client.Transport")
def test_disabled_algorithms_passed_directly_if_given(self, Transport):
SSHClient().connect(
"host",
sock=Mock(),
password="no",
disabled_algorithms={"keys": ["ssh-dss"]},
)
call_arg = Transport.call_args[1]["disabled_algorithms"]
assert call_arg == {"keys": ["ssh-dss"]}
@patch("paramiko.client.Transport")
def test_transport_factory_defaults_to_Transport(self, Transport):
sock, kex, creds, algos = Mock(), Mock(), Mock(), Mock()
SSHClient().connect(
"host",
sock=sock,
password="no",
gss_kex=kex,
gss_deleg_creds=creds,
disabled_algorithms=algos,
)
Transport.assert_called_once_with(
sock, gss_kex=kex, gss_deleg_creds=creds, disabled_algorithms=algos
)
@patch("paramiko.client.Transport")
def test_transport_factory_may_be_specified(self, Transport):
factory = Mock()
sock, kex, creds, algos = Mock(), Mock(), Mock(), Mock()
SSHClient().connect(
"host",
sock=sock,
password="no",
gss_kex=kex,
gss_deleg_creds=creds,
disabled_algorithms=algos,
transport_factory=factory,
)
factory.assert_called_once_with(
sock, gss_kex=kex, gss_deleg_creds=creds, disabled_algorithms=algos
)
# Safety check
assert not Transport.called
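# Hedged sketch: the transport_factory hook tested above lets callers swap
# in a Transport subclass at connect time. Hostname and credentials are
# placeholders; 'factory' is any callable accepting Transport's arguments.
def _connect_with_custom_transport(host, username, password, factory):
    import paramiko

    client = paramiko.SSHClient()
    client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
    client.connect(
        host,
        username=username,
        password=password,
        transport_factory=factory,
    )
    return client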
class PasswordPassphraseTests(ClientTest):
# TODO: most of these could reasonably be set up to use mocks/assertions
# (e.g. "gave passphrase -> expect PKey was given it as the passphrase")
# instead of suffering a real connection cycle.
# TODO: in that case, move the below to be part of an integration suite?
@requires_sha1_signing
def test_password_kwarg_works_for_password_auth(self):
# Straightforward / duplicate of earlier basic password test.
self._test_connection(password="pygmalion")
# TODO: more granular exception pending #387; should be signaling "no auth
# methods available" because no key and no password
@raises(SSHException)
@requires_sha1_signing
def test_passphrase_kwarg_not_used_for_password_auth(self):
# Using the "right" password in the "wrong" field shouldn't work.
self._test_connection(passphrase="pygmalion")
@requires_sha1_signing
def test_passphrase_kwarg_used_for_key_passphrase(self):
# Straightforward again, with new passphrase kwarg.
self._test_connection(
key_filename=_support("test_rsa_password.key"),
passphrase="television",
)
@requires_sha1_signing
def test_password_kwarg_used_for_passphrase_when_no_passphrase_kwarg_given(
self,
): # noqa
# Backwards compatibility: passphrase in the password field.
self._test_connection(
key_filename=_support("test_rsa_password.key"),
password="television",
)
@raises(AuthenticationException) # TODO: more granular
@requires_sha1_signing
def test_password_kwarg_not_used_for_passphrase_when_passphrase_kwarg_given( # noqa
self,
):
# Sanity: if we're given both fields, the password field is NOT used as
# a passphrase.
self._test_connection(
key_filename=_support("test_rsa_password.key"),
password="television",
passphrase="wat? lol no",
)
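# Hedged sketch distilling PasswordPassphraseTests above: 'passphrase'
# unlocks an encrypted private key, while 'password' is for password auth
# (and only doubles as a passphrase when no passphrase kwarg is given).
# All arguments below are placeholders.
def _connect_with_encrypted_key(host, username, key_path, key_passphrase):
    import paramiko

    client = paramiko.SSHClient()
    client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
    client.connect(
        host,
        username=username,
        key_filename=key_path,
        passphrase=key_passphrase,
    )
    return client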
# ---- file: paramiko_paramiko/tests/auth.py (paramiko/paramiko, LGPL-2.1) ----
"""
Tests focusing primarily on the authentication step.
Thus, they concern AuthHandler and AuthStrategy, with a side of Transport.
"""
from logging import Logger
from unittest.mock import Mock
from pytest import raises
from paramiko import (
AgentKey,
AuthenticationException,
AuthFailure,
AuthResult,
AuthSource,
AuthStrategy,
BadAuthenticationType,
DSSKey,
InMemoryPrivateKey,
NoneAuth,
OnDiskPrivateKey,
Password,
PrivateKey,
PKey,
RSAKey,
SSHException,
ServiceRequestingTransport,
SourceResult,
)
from ._util import (
_disable_sha1_pubkey,
_disable_sha2,
_disable_sha2_pubkey,
_support,
requires_sha1_signing,
server,
unicodey,
)
class AuthHandler_:
"""
Most of these tests are explicit about the auth method they call.
This is because not too many other tests do so (they rely on the implicit
auth trigger of various connect() kwargs).
"""
def bad_auth_type(self):
"""
verify that we get the right exception when an unsupported auth
type is requested.
"""
# Server won't allow password auth for this user, so should fail
# and return just publickey allowed types
with server(
connect=dict(username="unknown", password="error"),
catch_error=True,
) as (_, _, err):
assert isinstance(err, BadAuthenticationType)
assert err.allowed_types == ["publickey"]
def bad_password(self):
"""
verify that a bad password gets the right exception, and that a retry
with the right password works.
"""
# NOTE: Transport.connect doesn't do any auth upfront if no userauth
# related kwargs given.
with server(defer=True) as (tc, ts):
# Auth once, badly
with raises(AuthenticationException):
tc.auth_password(username="slowdive", password="error")
# And again, correctly
tc.auth_password(username="slowdive", password="pygmalion")
def multipart_auth(self):
"""
verify that multipart auth works.
"""
with server(defer=True) as (tc, ts):
assert tc.auth_password(
username="paranoid", password="paranoid"
) == ["publickey"]
key = DSSKey.from_private_key_file(_support("dss.key"))
assert tc.auth_publickey(username="paranoid", key=key) == []
def interactive_auth(self):
"""
verify keyboard-interactive auth works.
"""
def handler(title, instructions, prompts):
self.got_title = title
self.got_instructions = instructions
self.got_prompts = prompts
return ["cat"]
with server(defer=True) as (tc, ts):
assert tc.auth_interactive("commie", handler) == []
assert self.got_title == "password"
assert self.got_prompts == [("Password", False)]
def interactive_fallback(self):
"""
        verify that a password auth attempt will fall back to "interactive"
if password auth isn't supported but interactive is.
"""
with server(defer=True) as (tc, ts):
# This username results in an allowed_auth of just kbd-int,
# and has a configured interactive->response on the server.
assert tc.auth_password("commie", "cat") == []
def utf8(self):
"""
verify that utf-8 encoding happens in authentication.
"""
with server(defer=True) as (tc, ts):
assert tc.auth_password("utf8", unicodey) == []
def non_utf8(self):
"""
verify that non-utf-8 encoded passwords can be used for broken
servers.
"""
with server(defer=True) as (tc, ts):
assert tc.auth_password("non-utf8", "\xff") == []
def auth_exception_when_disconnected(self):
"""
verify that we catch a server disconnecting during auth, and report
it as an auth failure.
"""
with server(defer=True, skip_verify=True) as (tc, ts), raises(
AuthenticationException
):
tc.auth_password("bad-server", "hello")
def non_responsive_triggers_auth_exception(self):
"""
verify that authentication times out if server takes to long to
respond (or never responds).
"""
with server(defer=True, skip_verify=True) as (tc, ts), raises(
AuthenticationException
) as info:
tc.auth_timeout = 1 # 1 second, to speed up test
tc.auth_password("unresponsive-server", "hello")
assert "Authentication timeout" in str(info.value)
class AuthOnlyHandler_:
def _server(self, *args, **kwargs):
kwargs.setdefault("transport_factory", ServiceRequestingTransport)
return server(*args, **kwargs)
class fallback_pubkey_algorithm:
@requires_sha1_signing
def key_type_algo_selected_when_no_server_sig_algs(self):
privkey = RSAKey.from_private_key_file(_support("rsa.key"))
# Server pretending to be an apparently common setup:
# - doesn't support (or have enabled) sha2
# - also doesn't support (or have enabled) server-sig-algs/ext-info
# This is the scenario in which Paramiko has to guess-the-algo, and
# where servers that don't support sha2 or server-sig-algs can give
# us trouble.
server_init = dict(_disable_sha2_pubkey, server_sig_algs=False)
with self._server(
pubkeys=[privkey],
connect=dict(pkey=privkey),
server_init=server_init,
catch_error=True,
) as (tc, ts, err):
# Auth did work
assert tc.is_authenticated()
# Selected ssh-rsa, instead of first-in-the-list (rsa-sha2-512)
assert tc._agreed_pubkey_algorithm == "ssh-rsa"
@requires_sha1_signing
def key_type_algo_selection_is_cert_suffix_aware(self):
# This key has a cert next to it, which should trigger cert-aware
# loading within key classes.
privkey = PKey.from_path(_support("rsa.key"))
server_init = dict(_disable_sha2_pubkey, server_sig_algs=False)
with self._server(
pubkeys=[privkey],
connect=dict(pkey=privkey),
server_init=server_init,
catch_error=True,
) as (tc, ts, err):
assert not err
# Auth did work
assert tc.is_authenticated()
# Selected expected cert type
assert (
tc._agreed_pubkey_algorithm
== "[email protected]"
)
@requires_sha1_signing
def uses_first_preferred_algo_if_key_type_not_in_list(self):
# This is functionally the same as legacy AuthHandler, just
# arriving at the same place in a different manner.
privkey = RSAKey.from_private_key_file(_support("rsa.key"))
server_init = dict(_disable_sha2_pubkey, server_sig_algs=False)
with self._server(
pubkeys=[privkey],
connect=dict(pkey=privkey),
server_init=server_init,
client_init=_disable_sha1_pubkey, # no ssh-rsa
catch_error=True,
) as (tc, ts, err):
assert not tc.is_authenticated()
assert isinstance(err, AuthenticationException)
assert tc._agreed_pubkey_algorithm == "rsa-sha2-512"
class SHA2SignaturePubkeys:
def pubkey_auth_honors_disabled_algorithms(self):
privkey = RSAKey.from_private_key_file(_support("rsa.key"))
with server(
pubkeys=[privkey],
connect=dict(pkey=privkey),
init=dict(
disabled_algorithms=dict(
pubkeys=["ssh-rsa", "rsa-sha2-256", "rsa-sha2-512"]
)
),
catch_error=True,
) as (_, _, err):
assert isinstance(err, SSHException)
assert "no RSA pubkey algorithms" in str(err)
def client_sha2_disabled_server_sha1_disabled_no_match(self):
privkey = RSAKey.from_private_key_file(_support("rsa.key"))
with server(
pubkeys=[privkey],
connect=dict(pkey=privkey),
client_init=_disable_sha2_pubkey,
server_init=_disable_sha1_pubkey,
catch_error=True,
) as (tc, ts, err):
assert isinstance(err, AuthenticationException)
def client_sha1_disabled_server_sha2_disabled_no_match(self):
privkey = RSAKey.from_private_key_file(_support("rsa.key"))
with server(
pubkeys=[privkey],
connect=dict(pkey=privkey),
client_init=_disable_sha1_pubkey,
server_init=_disable_sha2_pubkey,
catch_error=True,
) as (tc, ts, err):
assert isinstance(err, AuthenticationException)
@requires_sha1_signing
def ssh_rsa_still_used_when_sha2_disabled(self):
privkey = RSAKey.from_private_key_file(_support("rsa.key"))
# NOTE: this works because key obj comparison uses public bytes
# TODO: would be nice for PKey to grow a legit "give me another obj of
# same class but just the public bits" using asbytes()
with server(
pubkeys=[privkey], connect=dict(pkey=privkey), init=_disable_sha2
) as (tc, _):
assert tc.is_authenticated()
@requires_sha1_signing
def first_client_preferred_algo_used_when_no_server_sig_algs(self):
privkey = RSAKey.from_private_key_file(_support("rsa.key"))
# Server pretending to be an apparently common setup:
# - doesn't support (or have enabled) sha2
# - also doesn't support (or have enabled) server-sig-algs/ext-info
# This is the scenario in which Paramiko has to guess-the-algo, and
# where servers that don't support sha2 or server-sig-algs give us
# trouble.
server_init = dict(_disable_sha2_pubkey, server_sig_algs=False)
with server(
pubkeys=[privkey],
connect=dict(username="slowdive", pkey=privkey),
server_init=server_init,
catch_error=True,
) as (tc, ts, err):
assert not tc.is_authenticated()
assert isinstance(err, AuthenticationException)
# Oh no! this isn't ssh-rsa, and our server doesn't support sha2!
assert tc._agreed_pubkey_algorithm == "rsa-sha2-512"
def sha2_512(self):
privkey = RSAKey.from_private_key_file(_support("rsa.key"))
with server(
pubkeys=[privkey],
connect=dict(pkey=privkey),
init=dict(
disabled_algorithms=dict(pubkeys=["ssh-rsa", "rsa-sha2-256"])
),
) as (tc, ts):
assert tc.is_authenticated()
assert tc._agreed_pubkey_algorithm == "rsa-sha2-512"
def sha2_256(self):
privkey = RSAKey.from_private_key_file(_support("rsa.key"))
with server(
pubkeys=[privkey],
connect=dict(pkey=privkey),
init=dict(
disabled_algorithms=dict(pubkeys=["ssh-rsa", "rsa-sha2-512"])
),
) as (tc, ts):
assert tc.is_authenticated()
assert tc._agreed_pubkey_algorithm == "rsa-sha2-256"
def sha2_256_when_client_only_enables_256(self):
privkey = RSAKey.from_private_key_file(_support("rsa.key"))
with server(
pubkeys=[privkey],
connect=dict(pkey=privkey),
# Client-side only; server still accepts all 3.
client_init=dict(
disabled_algorithms=dict(pubkeys=["ssh-rsa", "rsa-sha2-512"])
),
) as (tc, ts):
assert tc.is_authenticated()
assert tc._agreed_pubkey_algorithm == "rsa-sha2-256"
class AuthSource_:
class base_class:
def init_requires_and_saves_username(self):
with raises(TypeError):
AuthSource()
assert AuthSource(username="foo").username == "foo"
def dunder_repr_delegates_to_helper(self):
source = AuthSource("foo")
source._repr = Mock(wraps=lambda: "whatever")
repr(source)
source._repr.assert_called_once_with()
def repr_helper_prints_basic_kv_pairs(self):
assert repr(AuthSource("foo")) == "AuthSource()"
assert (
AuthSource("foo")._repr(bar="open") == "AuthSource(bar='open')"
)
def authenticate_takes_transport_and_is_abstract(self):
# TODO: this test kinda just goes away once we're typed?
with raises(TypeError):
AuthSource("foo").authenticate()
with raises(NotImplementedError):
AuthSource("foo").authenticate(None)
class NoneAuth_:
def authenticate_auths_none(self):
trans = Mock()
result = NoneAuth("foo").authenticate(trans)
trans.auth_none.assert_called_once_with("foo")
assert result is trans.auth_none.return_value
def repr_shows_class(self):
assert repr(NoneAuth("foo")) == "NoneAuth()"
class Password_:
def init_takes_and_stores_password_getter(self):
with raises(TypeError):
Password("foo")
getter = Mock()
pw = Password("foo", password_getter=getter)
assert pw.password_getter is getter
def repr_adds_username(self):
pw = Password("foo", password_getter=Mock())
assert repr(pw) == "Password(user='foo')"
def authenticate_gets_and_supplies_password(self):
getter = Mock(return_value="bar")
trans = Mock()
pw = Password("foo", password_getter=getter)
result = pw.authenticate(trans)
trans.auth_password.assert_called_once_with("foo", "bar")
assert result is trans.auth_password.return_value
class PrivateKey_:
def authenticate_calls_publickey_with_pkey(self):
source = PrivateKey(username="foo")
source.pkey = Mock() # set by subclasses
trans = Mock()
result = source.authenticate(trans)
trans.auth_publickey.assert_called_once_with("foo", source.pkey)
assert result is trans.auth_publickey.return_value
class InMemoryPrivateKey_:
def init_takes_pkey_object(self):
with raises(TypeError):
InMemoryPrivateKey("foo")
pkey = Mock()
source = InMemoryPrivateKey(username="foo", pkey=pkey)
assert source.pkey is pkey
def repr_shows_pkey_repr(self):
pkey = PKey.from_path(_support("ed25519.key"))
source = InMemoryPrivateKey("foo", pkey)
assert (
repr(source)
== "InMemoryPrivateKey(pkey=PKey(alg=ED25519, bits=256, fp=SHA256:J6VESFdD3xSChn8y9PzWzeF+1tl892mOy2TqkMLO4ow))" # noqa
)
def repr_appends_agent_flag_when_AgentKey(self):
real_key = PKey.from_path(_support("ed25519.key"))
pkey = AgentKey(agent=None, blob=bytes(real_key))
source = InMemoryPrivateKey("foo", pkey)
assert (
repr(source)
== "InMemoryPrivateKey(pkey=PKey(alg=ED25519, bits=256, fp=SHA256:J6VESFdD3xSChn8y9PzWzeF+1tl892mOy2TqkMLO4ow)) [agent]" # noqa
)
class OnDiskPrivateKey_:
def init_takes_source_path_and_pkey(self):
with raises(TypeError):
OnDiskPrivateKey("foo")
with raises(TypeError):
OnDiskPrivateKey("foo", "bar")
with raises(TypeError):
OnDiskPrivateKey("foo", "bar", "biz")
source = OnDiskPrivateKey(
username="foo",
source="ssh-config",
path="of-exile",
pkey="notreally",
)
assert source.username == "foo"
assert source.source == "ssh-config"
assert source.path == "of-exile"
assert source.pkey == "notreally"
def init_requires_specific_value_for_source(self):
with raises(
ValueError,
match=r"source argument must be one of: \('ssh-config', 'python-config', 'implicit-home'\)", # noqa
):
OnDiskPrivateKey("foo", source="what?", path="meh", pkey="no")
def repr_reflects_source_path_and_pkey(self):
source = OnDiskPrivateKey(
username="foo",
source="ssh-config",
path="of-exile",
pkey="notreally",
)
assert (
repr(source)
== "OnDiskPrivateKey(key='notreally', source='ssh-config', path='of-exile')" # noqa
)
class AuthResult_:
def setup_method(self):
self.strat = AuthStrategy(None)
def acts_like_list_with_strategy_attribute(self):
with raises(TypeError):
AuthResult()
# kwarg works by itself
AuthResult(strategy=self.strat)
# or can be given as posarg w/ regular list() args after
result = AuthResult(self.strat, [1, 2, 3])
assert result.strategy is self.strat
assert result == [1, 2, 3]
assert isinstance(result, list)
def repr_is_list_repr_untouched(self):
result = AuthResult(self.strat, [1, 2, 3])
assert repr(result) == "[1, 2, 3]"
class dunder_str:
def is_multiline_display_of_sourceresult_tuples(self):
result = AuthResult(self.strat)
result.append(SourceResult("foo", "bar"))
result.append(SourceResult("biz", "baz"))
assert str(result) == "foo -> bar\nbiz -> baz"
def shows_str_not_repr_of_auth_source_and_result(self):
result = AuthResult(self.strat)
result.append(
SourceResult(NoneAuth("foo"), ["password", "pubkey"])
)
assert str(result) == "NoneAuth() -> ['password', 'pubkey']"
def empty_list_result_values_show_success_string(self):
result = AuthResult(self.strat)
result.append(SourceResult(NoneAuth("foo"), []))
assert str(result) == "NoneAuth() -> success"
class AuthFailure_:
def is_an_AuthenticationException(self):
assert isinstance(AuthFailure(None), AuthenticationException)
def init_requires_result(self):
with raises(TypeError):
AuthFailure()
result = AuthResult(None)
fail = AuthFailure(result=result)
assert fail.result is result
def str_is_newline_plus_result_str(self):
result = AuthResult(None)
result.append(SourceResult(NoneAuth("foo"), Exception("onoz")))
fail = AuthFailure(result)
assert str(fail) == "\nNoneAuth() -> onoz"
class AuthStrategy_:
def init_requires_ssh_config_param_and_sets_up_a_logger(self):
with raises(TypeError):
AuthStrategy()
conf = object()
strat = AuthStrategy(ssh_config=conf)
assert strat.ssh_config is conf
assert isinstance(strat.log, Logger)
assert strat.log.name == "paramiko.auth_strategy"
def get_sources_is_abstract(self):
with raises(NotImplementedError):
AuthStrategy(None).get_sources()
class authenticate:
def setup_method(self):
self.strat = AuthStrategy(None) # ssh_config not used directly
self.source, self.transport = NoneAuth(None), Mock()
self.source.authenticate = Mock()
self.strat.get_sources = Mock(return_value=[self.source])
def requires_and_uses_transport_with_methods_returning_result(self):
with raises(TypeError):
self.strat.authenticate()
result = self.strat.authenticate(self.transport)
self.strat.get_sources.assert_called_once_with()
self.source.authenticate.assert_called_once_with(self.transport)
assert isinstance(result, AuthResult)
assert result.strategy is self.strat
assert len(result) == 1
source_res = result[0]
assert isinstance(source_res, SourceResult)
assert source_res.source is self.source
assert source_res.result is self.source.authenticate.return_value
def logs_sources_attempted(self):
self.strat.log = Mock()
self.strat.authenticate(self.transport)
self.strat.log.debug.assert_called_once_with("Trying NoneAuth()")
def raises_AuthFailure_if_no_successes(self):
self.strat.log = Mock()
oops = Exception("onoz")
self.source.authenticate.side_effect = oops
with raises(AuthFailure) as info:
self.strat.authenticate(self.transport)
result = info.value.result
assert isinstance(result, AuthResult)
assert len(result) == 1
source_res = result[0]
assert isinstance(source_res, SourceResult)
assert source_res.source is self.source
assert source_res.result is oops
self.strat.log.info.assert_called_once_with(
"Authentication via NoneAuth() failed with Exception"
)
def short_circuits_on_successful_auth(self):
kaboom = Mock(authenticate=Mock(side_effect=Exception("onoz")))
self.strat.get_sources.return_value = [self.source, kaboom]
result = self.strat.authenticate(self.transport)
# No exception, and it's just a regular ol Result
assert isinstance(result, AuthResult)
# And it did not capture any attempt to execute the 2nd source
assert len(result) == 1
assert result[0].source is self.source
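# A hedged sketch (not part of the suite above): a concrete AuthStrategy
# subclass showing the get_sources()/authenticate() contract these tests
# exercise. NoneAuth/Password/AuthStrategy are the same names imported at the
# top of this module; the username and password getter are hypothetical.
class _DemoNoneThenPassword(AuthStrategy):
    def __init__(self, ssh_config, username, password_getter):
        super().__init__(ssh_config=ssh_config)
        self.username = username
        self.password_getter = password_getter

    def get_sources(self):
        # Sources are tried in order; authenticate() records a SourceResult
        # per attempt and stops at the first success (see
        # short_circuits_on_successful_auth above).
        yield NoneAuth(self.username)
        yield Password(self.username, password_getter=self.password_getter)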
# paramiko_paramiko/tests/test_pkey.py
# -*- coding: utf-8 -*-
# Copyright (C) 2003-2009 Robey Pointer <[email protected]>
#
# This file is part of paramiko.
#
# Paramiko is free software; you can redistribute it and/or modify it under the
# terms of the GNU Lesser General Public License as published by the Free
# Software Foundation; either version 2.1 of the License, or (at your option)
# any later version.
#
# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Paramiko; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
"""
Some unit tests for public/private key objects.
"""
import unittest
import os
import stat
from binascii import hexlify
from hashlib import md5
from io import StringIO
from paramiko import (
RSAKey,
DSSKey,
ECDSAKey,
Ed25519Key,
Message,
util,
SSHException,
)
from paramiko.util import b
from paramiko.common import o600, byte_chr
from cryptography.exceptions import UnsupportedAlgorithm
from cryptography.hazmat.primitives.asymmetric.rsa import RSAPrivateNumbers
from unittest.mock import patch, Mock
import pytest
from ._util import _support, is_low_entropy, requires_sha1_signing
# from openssh's ssh-keygen
PUB_RSA = "ssh-rsa AAAAB3NzaC1yc2EAAAABIwAAAIEA049W6geFpmsljTwfvI1UmKWWJPNFI74+vNKTk4dmzkQY2yAMs6FhlvhlI8ysU4oj71ZsRYMecHbBbxdN79+JRFVYTKaLqjwGENeTd+yv4q+V2PvZv3fLnzApI3l7EJCqhWwJUHJ1jAkZzqDx0tyOL4uoZpww3nmE0kb3y21tH4c=" # noqa
PUB_DSS = "ssh-dss AAAAB3NzaC1kc3MAAACBAOeBpgNnfRzr/twmAQRu2XwWAp3CFtrVnug6s6fgwj/oLjYbVtjAy6pl/h0EKCWx2rf1IetyNsTxWrniA9I6HeDj65X1FyDkg6g8tvCnaNB8Xp/UUhuzHuGsMIipRxBxw9LF608EqZcj1E3ytktoW5B5OcjrkEoz3xG7C+rpIjYvAAAAFQDwz4UnmsGiSNu5iqjn3uTzwUpshwAAAIEAkxfFeY8P2wZpDjX0MimZl5wkoFQDL25cPzGBuB4OnB8NoUk/yjAHIIpEShw8V+LzouMK5CTJQo5+Ngw3qIch/WgRmMHy4kBq1SsXMjQCte1So6HBMvBPIW5SiMTmjCfZZiw4AYHK+B/JaOwaG9yRg2Ejg4Ok10+XFDxlqZo8Y+wAAACARmR7CCPjodxASvRbIyzaVpZoJ/Z6x7dAumV+ysrV1BVYd0lYukmnjO1kKBWApqpH1ve9XDQYN8zgxM4b16L21kpoWQnZtXrY3GZ4/it9kUgyB7+NwacIBlXa8cMDL7Q/69o0d54U0X/NeX5QxuYR6OMJlrkQB7oiW/P/1mwjQgE=" # noqa
PUB_ECDSA_256 = "ecdsa-sha2-nistp256 AAAAE2VjZHNhLXNoYTItbmlzdHAyNTYAAAAIbmlzdHAyNTYAAABBBJSPZm3ZWkvk/Zx8WP+fZRZ5/NBBHnGQwR6uIC6XHGPDIHuWUzIjAwA0bzqkOUffEsbLe+uQgKl5kbc/L8KA/eo=" # noqa
PUB_ECDSA_384 = "ecdsa-sha2-nistp384 AAAAE2VjZHNhLXNoYTItbmlzdHAzODQAAAAIbmlzdHAzODQAAABhBBbGibQLW9AAZiGN2hEQxWYYoFaWKwN3PKSaDJSMqmIn1Z9sgRUuw8Y/w502OGvXL/wFk0i2z50l3pWZjD7gfMH7gX5TUiCzwrQkS+Hn1U2S9aF5WJp0NcIzYxXw2r4M2A==" # noqa
PUB_ECDSA_521 = "ecdsa-sha2-nistp521 AAAAE2VjZHNhLXNoYTItbmlzdHA1MjEAAAAIbmlzdHA1MjEAAACFBACaOaFLZGuxa5AW16qj6VLypFbLrEWrt9AZUloCMefxO8bNLjK/O5g0rAVasar1TnyHE9qj4NwzANZASWjQNbc4MAG8vzqezFwLIn/kNyNTsXNfqEko9OgHZknlj2Z79dwTJcRAL4QLcT5aND0EHZLB2fAUDXiWIb2j4rg1mwPlBMiBXA==" # noqa
PUB_RSA_2K_OPENSSH = "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDF+Dpr54DX0WdeTDpNAMdkCWEkl3OXtNgf58qlN1gX572OLBqLf0zT4bHstUEpU3piazph/rSWcUMuBoD46tZ6jiH7H9b9Pem2eYQWaELDDkM+v9BMbEy5rMbFRLol5OtEvPFqneyEAanPOgvd8t3yyhSev9QVusakzJ8j8LGgrA8huYZ+Srnw0shEWLG70KUKCh3rG0QIvA8nfhtUOisr2Gp+F0YxMGb5gwBlQYAYE5l6u1SjZ7hNjyNosjK+wRBFgFFBYVpkZKJgWoK9w4ijFyzMZTucnZMqKOKAjIJvHfKBf2/cEfYxSq1EndqTqjYsd9T7/s2vcn1OH5a0wkER" # noqa
RSA_2K_OPENSSH_P = 161773687847617758886803946572654778625119997081005961935077336594287351354258259920334554906235187683459069634729972458348855793639393524799865799559575414247668746919721196359908321800753913350455861871582087986355637886875933045224711827701526739934602161222599672381604211130651397331775901258858869418853 # noqa
RSA_2K_OPENSSH_Q = 154483416325630619558401349033571772244816915504195060221073502923720741119664820208064202825686848103224453777955988437823797692957091438442833606009978046057345917301441832647551208158342812551003395417862260727795454409459089912659057393394458150862012620127030757893820711157099494238156383382454310199869 # noqa
PUB_DSS_1K_OPENSSH = "ssh-dss AAAAB3NzaC1kc3MAAACBAL8XEx7F9xuwBNles+vWpNF+YcofrBhjX1r5QhpBe0eoYWLHRcroN6lxwCdGYRfgOoRjTncBiixQX/uUxAY96zDh3ir492s2BcJt4ihvNn/AY0I0OTuX/2IwGk9CGzafjaeZNVYxMa8lcVt0hSOTjkPQ7gVuk6bJzMInvie+VWKLAAAAFQDUgYdY+rhR0SkKbC09BS/SIHcB+wAAAIB44+4zpCNcd0CGvZlowH99zyPX8uxQtmTLQFuR2O8O0FgVVuCdDgD0D9W8CLOp32oatpM0jyyN89EdvSWzjHzZJ+L6H1FtZps7uhpDFWHdva1R25vyGecLMUuXjo5t/D7oCDih+HwHoSAxoi0QvsPd8/qqHQVznNJKtR6thUpXEwAAAIAG4DCBjbgTTgpBw0egRkJwBSz0oTt+1IcapNU2jA6N8urMSk9YXHEQHKN68BAF3YJ59q2Ujv3LOXmBqGd1T+kzwUszfMlgzq8MMu19Yfzse6AIK1Agn1Vj6F7YXLsXDN+T4KszX5+FJa7t/Zsp3nALWy6l0f4WKivEF5Y2QpEFcQ==" # noqa
PUB_EC_384_OPENSSH = "ecdsa-sha2-nistp384 AAAAE2VjZHNhLXNoYTItbmlzdHAzODQAAAAIbmlzdHAzODQAAABhBIch5LXTq/L/TWsTGG6dIktxD8DIMh7EfvoRmWsks6CuNDTvFvbQNtY4QO1mn5OXegHbS0M5DPIS++wpKGFP3suDEH08O35vZQasLNrL0tO2jyyEnzB2ZEx3PPYci811yg==" # noqa
FINGER_RSA = "1024 60:73:38:44:cb:51:86:65:7f:de:da:a2:2b:5a:57:d5"
FINGER_DSS = "1024 44:78:f0:b9:a2:3c:c5:18:20:09:ff:75:5b:c1:d2:6c"
FINGER_ECDSA_256 = "256 25:19:eb:55:e6:a1:47:ff:4f:38:d2:75:6f:a5:d5:60"
FINGER_ECDSA_384 = "384 c1:8d:a0:59:09:47:41:8e:a8:a6:07:01:29:23:b4:65"
FINGER_ECDSA_521 = "521 44:58:22:52:12:33:16:0e:ce:0e:be:2c:7c:7e:cc:1e"
SIGNED_RSA = "20:d7:8a:31:21:cb:f7:92:12:f2:a4:89:37:f5:78:af:e6:16:b6:25:b9:97:3d:a2:cd:5f:ca:20:21:73:4c:ad:34:73:8f:20:77:28:e2:94:15:08:d8:91:40:7a:85:83:bf:18:37:95:dc:54:1a:9b:88:29:6c:73:ca:38:b4:04:f1:56:b9:f2:42:9d:52:1b:29:29:b4:4f:fd:c9:2d:af:47:d2:40:76:30:f3:63:45:0c:d9:1d:43:86:0f:1c:70:e2:93:12:34:f3:ac:c5:0a:2f:14:50:66:59:f1:88:ee:c1:4a:e9:d1:9c:4e:46:f0:0e:47:6f:38:74:f1:44:a8" # noqa
SIGNED_RSA_256 = "cc:6:60:e0:0:2c:ac:9e:26:bc:d5:68:64:3f:9f:a7:e5:aa:41:eb:88:4a:25:5:9c:93:84:66:ef:ef:60:f4:34:fb:f4:c8:3d:55:33:6a:77:bd:b2:ee:83:f:71:27:41:7e:f5:7:5:0:a9:4c:7:80:6f:be:76:67:cb:58:35:b9:2b:f3:c2:d3:3c:ee:e1:3f:59:e0:fa:e4:5c:92:ed:ae:74:de:d:d6:27:16:8f:84:a3:86:68:c:94:90:7d:6e:cc:81:12:d8:b6:ad:aa:31:a8:13:3d:63:81:3e:bb:5:b6:38:4d:2:d:1b:5b:70:de:83:cc:3a:cb:31" # noqa
SIGNED_RSA_512 = "87:46:8b:75:92:33:78:a0:22:35:32:39:23:c6:ab:e1:6:92:ad:bc:7f:6e:ab:19:32:e4:78:b2:2c:8f:1d:c:65:da:fc:a5:7:ca:b6:55:55:31:83:b1:a0:af:d1:95:c5:2e:af:56:ba:f5:41:64:f:39:9d:af:82:43:22:8f:90:52:9d:89:e7:45:97:df:f3:f2:bc:7b:3a:db:89:e:34:fd:18:62:25:1b:ef:77:aa:c6:6c:99:36:3a:84:d6:9c:2a:34:8c:7f:f4:bb:c9:a5:9a:6c:11:f2:cf:da:51:5e:1e:7f:90:27:34:de:b2:f3:15:4f:db:47:32:6b:a7" # noqa
FINGER_RSA_2K_OPENSSH = "2048 68:d1:72:01:bf:c0:0c:66:97:78:df:ce:75:74:46:d6"
FINGER_DSS_1K_OPENSSH = "1024 cf:1d:eb:d7:61:d3:12:94:c6:c0:c6:54:35:35:b0:82"
FINGER_EC_384_OPENSSH = "384 72:14:df:c1:9a:c3:e6:0e:11:29:d6:32:18:7b:ea:9b"
RSA_PRIVATE_OUT = """\
-----BEGIN RSA PRIVATE KEY-----
MIICWgIBAAKBgQDTj1bqB4WmayWNPB+8jVSYpZYk80Ujvj680pOTh2bORBjbIAyz
oWGW+GUjzKxTiiPvVmxFgx5wdsFvF03v34lEVVhMpouqPAYQ15N37K/ir5XY+9m/
d8ufMCkjeXsQkKqFbAlQcnWMCRnOoPHS3I4vi6hmnDDeeYTSRvfLbW0fhwIBIwKB
gBIiOqZYaoqbeD9OS9z2K9KR2atlTxGxOJPXiP4ESqP3NVScWNwyZ3NXHpyrJLa0
EbVtzsQhLn6rF+TzXnOlcipFvjsem3iYzCpuChfGQ6SovTcOjHV9z+hnpXvQ/fon
soVRZY65wKnF7IAoUwTmJS9opqgrN6kRgCd3DASAMd1bAkEA96SBVWFt/fJBNJ9H
tYnBKZGw0VeHOYmVYbvMSstssn8un+pQpUm9vlG/bp7Oxd/m+b9KWEh2xPfv6zqU
avNwHwJBANqzGZa/EpzF4J8pGti7oIAPUIDGMtfIcmqNXVMckrmzQ2vTfqtkEZsA
4rE1IERRyiJQx6EJsz21wJmGV9WJQ5kCQQDwkS0uXqVdFzgHO6S++tjmjYcxwr3g
H0CoFYSgbddOT6miqRskOQF3DZVkJT3kyuBgU2zKygz52ukQZMqxCb1fAkASvuTv
qfpH87Qq5kQhNKdbbwbmd2NxlNabazPijWuphGTdW0VfJdWfklyS2Kr+iqrs/5wV
HhathJt636Eg7oIjAkA8ht3MQ+XSl9yIJIS8gVpbPxSw5OMfw0PjVE7tBdQruiSc
nvuQES5C9BMHjF39LZiGH1iLQy7FgdHyoP+eodI7
-----END RSA PRIVATE KEY-----
"""
DSS_PRIVATE_OUT = """\
-----BEGIN DSA PRIVATE KEY-----
MIIBuwIBAAKBgQDngaYDZ30c6/7cJgEEbtl8FgKdwhba1Z7oOrOn4MI/6C42G1bY
wMuqZf4dBCglsdq39SHrcjbE8Vq54gPSOh3g4+uV9Rcg5IOoPLbwp2jQfF6f1FIb
sx7hrDCIqUcQccPSxetPBKmXI9RN8rZLaFuQeTnI65BKM98Ruwvq6SI2LwIVAPDP
hSeawaJI27mKqOfe5PPBSmyHAoGBAJMXxXmPD9sGaQ419DIpmZecJKBUAy9uXD8x
gbgeDpwfDaFJP8owByCKREocPFfi86LjCuQkyUKOfjYMN6iHIf1oEZjB8uJAatUr
FzI0ArXtUqOhwTLwTyFuUojE5own2WYsOAGByvgfyWjsGhvckYNhI4ODpNdPlxQ8
ZamaPGPsAoGARmR7CCPjodxASvRbIyzaVpZoJ/Z6x7dAumV+ysrV1BVYd0lYukmn
jO1kKBWApqpH1ve9XDQYN8zgxM4b16L21kpoWQnZtXrY3GZ4/it9kUgyB7+NwacI
BlXa8cMDL7Q/69o0d54U0X/NeX5QxuYR6OMJlrkQB7oiW/P/1mwjQgECFGI9QPSc
h9pT9XHqn+1rZ4bK+QGA
-----END DSA PRIVATE KEY-----
"""
ECDSA_PRIVATE_OUT_256 = """\
-----BEGIN EC PRIVATE KEY-----
MHcCAQEEIKB6ty3yVyKEnfF/zprx0qwC76MsMlHY4HXCnqho2eKioAoGCCqGSM49
AwEHoUQDQgAElI9mbdlaS+T9nHxY/59lFnn80EEecZDBHq4gLpccY8Mge5ZTMiMD
ADRvOqQ5R98Sxst765CAqXmRtz8vwoD96g==
-----END EC PRIVATE KEY-----
"""
ECDSA_PRIVATE_OUT_384 = """\
-----BEGIN EC PRIVATE KEY-----
MIGkAgEBBDBDdO8IXvlLJgM7+sNtPl7tI7FM5kzuEUEEPRjXIPQM7mISciwJPBt+
y43EuG8nL4mgBwYFK4EEACKhZANiAAQWxom0C1vQAGYhjdoREMVmGKBWlisDdzyk
mgyUjKpiJ9WfbIEVLsPGP8OdNjhr1y/8BZNIts+dJd6VmYw+4HzB+4F+U1Igs8K0
JEvh59VNkvWheViadDXCM2MV8Nq+DNg=
-----END EC PRIVATE KEY-----
"""
ECDSA_PRIVATE_OUT_521 = """\
-----BEGIN EC PRIVATE KEY-----
MIHcAgEBBEIAprQtAS3OF6iVUkT8IowTHWicHzShGgk86EtuEXvfQnhZFKsWm6Jo
iqAr1yEaiuI9LfB3Xs8cjuhgEEfbduYr/f6gBwYFK4EEACOhgYkDgYYABACaOaFL
ZGuxa5AW16qj6VLypFbLrEWrt9AZUloCMefxO8bNLjK/O5g0rAVasar1TnyHE9qj
4NwzANZASWjQNbc4MAG8vzqezFwLIn/kNyNTsXNfqEko9OgHZknlj2Z79dwTJcRA
L4QLcT5aND0EHZLB2fAUDXiWIb2j4rg1mwPlBMiBXA==
-----END EC PRIVATE KEY-----
"""
x1234 = b"\x01\x02\x03\x04"
TEST_KEY_BYTESTR = "\x00\x00\x00\x07ssh-rsa\x00\x00\x00\x01#\x00\x00\x00\x00ӏV\x07k%<\x1fT$E#>ғfD\x18 \x0cae#̬S#VlE\x1epvo\x17M߉DUXL<\x06\x10דw\u2bd5ٿw˟0)#y{\x10l\tPru\t\x19Π\u070e/f0yFmm\x1f" # noqa
class KeyTest(unittest.TestCase):
def assert_keyfile_is_encrypted(self, keyfile):
"""
        A quick check that ``keyfile`` looks like an encrypted private key.
"""
with open(keyfile, "r") as fh:
self.assertEqual(
fh.readline()[:-1], "-----BEGIN RSA PRIVATE KEY-----"
)
self.assertEqual(fh.readline()[:-1], "Proc-Type: 4,ENCRYPTED")
self.assertEqual(fh.readline()[0:10], "DEK-Info: ")
def test_generate_key_bytes(self):
key = util.generate_key_bytes(md5, x1234, "happy birthday", 30)
exp = b"\x61\xE1\xF2\x72\xF4\xC1\xC4\x56\x15\x86\xBD\x32\x24\x98\xC0\xE9\x24\x67\x27\x80\xF4\x7B\xB3\x7D\xDA\x7D\x54\x01\x9E\x64" # noqa
self.assertEqual(exp, key)
def test_load_rsa(self):
key = RSAKey.from_private_key_file(_support("rsa.key"))
self.assertEqual("ssh-rsa", key.get_name())
exp_rsa = b(FINGER_RSA.split()[1].replace(":", ""))
my_rsa = hexlify(key.get_fingerprint())
self.assertEqual(exp_rsa, my_rsa)
self.assertEqual(PUB_RSA.split()[1], key.get_base64())
self.assertEqual(1024, key.get_bits())
s = StringIO()
key.write_private_key(s)
self.assertEqual(RSA_PRIVATE_OUT, s.getvalue())
s.seek(0)
key2 = RSAKey.from_private_key(s)
self.assertEqual(key, key2)
def test_load_rsa_transmutes_crypto_exceptions(self):
# TODO: nix unittest for pytest
for exception in (TypeError("onoz"), UnsupportedAlgorithm("oops")):
with patch(
"paramiko.rsakey.serialization.load_der_private_key"
) as loader:
loader.side_effect = exception
with pytest.raises(SSHException, match=str(exception)):
RSAKey.from_private_key_file(_support("rsa.key"))
def test_loading_empty_keys_errors_usefully(self):
# #1599 - raise SSHException instead of IndexError
with pytest.raises(SSHException, match="no lines"):
RSAKey.from_private_key_file(_support("blank_rsa.key"))
def test_load_rsa_password(self):
key = RSAKey.from_private_key_file(
_support("test_rsa_password.key"), "television"
)
self.assertEqual("ssh-rsa", key.get_name())
exp_rsa = b(FINGER_RSA.split()[1].replace(":", ""))
my_rsa = hexlify(key.get_fingerprint())
self.assertEqual(exp_rsa, my_rsa)
self.assertEqual(PUB_RSA.split()[1], key.get_base64())
self.assertEqual(1024, key.get_bits())
def test_load_dss(self):
key = DSSKey.from_private_key_file(_support("dss.key"))
self.assertEqual("ssh-dss", key.get_name())
exp_dss = b(FINGER_DSS.split()[1].replace(":", ""))
my_dss = hexlify(key.get_fingerprint())
self.assertEqual(exp_dss, my_dss)
self.assertEqual(PUB_DSS.split()[1], key.get_base64())
self.assertEqual(1024, key.get_bits())
s = StringIO()
key.write_private_key(s)
self.assertEqual(DSS_PRIVATE_OUT, s.getvalue())
s.seek(0)
key2 = DSSKey.from_private_key(s)
self.assertEqual(key, key2)
def test_load_dss_password(self):
key = DSSKey.from_private_key_file(
_support("test_dss_password.key"), "television"
)
self.assertEqual("ssh-dss", key.get_name())
exp_dss = b(FINGER_DSS.split()[1].replace(":", ""))
my_dss = hexlify(key.get_fingerprint())
self.assertEqual(exp_dss, my_dss)
self.assertEqual(PUB_DSS.split()[1], key.get_base64())
self.assertEqual(1024, key.get_bits())
def test_compare_rsa(self):
# verify that the private & public keys compare equal
key = RSAKey.from_private_key_file(_support("rsa.key"))
self.assertEqual(key, key)
pub = RSAKey(data=key.asbytes())
self.assertTrue(key.can_sign())
self.assertTrue(not pub.can_sign())
self.assertEqual(key, pub)
def test_compare_dss(self):
# verify that the private & public keys compare equal
key = DSSKey.from_private_key_file(_support("dss.key"))
self.assertEqual(key, key)
pub = DSSKey(data=key.asbytes())
self.assertTrue(key.can_sign())
self.assertTrue(not pub.can_sign())
self.assertEqual(key, pub)
def _sign_and_verify_rsa(self, algorithm, saved_sig):
key = RSAKey.from_private_key_file(_support("rsa.key"))
msg = key.sign_ssh_data(b"ice weasels", algorithm)
assert isinstance(msg, Message)
msg.rewind()
assert msg.get_text() == algorithm
expected = b"".join(
[byte_chr(int(x, 16)) for x in saved_sig.split(":")]
)
assert msg.get_binary() == expected
msg.rewind()
pub = RSAKey(data=key.asbytes())
self.assertTrue(pub.verify_ssh_sig(b"ice weasels", msg))
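    # A hedged sketch (illustrative, not a test) of the public sign/verify
    # flow the helper above drives, with a freshly generated key instead of
    # the fixture and no saved-signature comparison:
    #
    #     key = RSAKey.generate(2048)
    #     msg = key.sign_ssh_data(b"payload", "rsa-sha2-256")
    #     msg.rewind()
    #     assert RSAKey(data=key.asbytes()).verify_ssh_sig(b"payload", msg)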
@requires_sha1_signing
def test_sign_and_verify_ssh_rsa(self):
self._sign_and_verify_rsa("ssh-rsa", SIGNED_RSA)
def test_sign_and_verify_rsa_sha2_512(self):
self._sign_and_verify_rsa("rsa-sha2-512", SIGNED_RSA_512)
def test_sign_and_verify_rsa_sha2_256(self):
self._sign_and_verify_rsa("rsa-sha2-256", SIGNED_RSA_256)
def test_sign_dss(self):
# verify that the dss private key can sign and verify
key = DSSKey.from_private_key_file(_support("dss.key"))
msg = key.sign_ssh_data(b"ice weasels")
self.assertTrue(type(msg) is Message)
msg.rewind()
self.assertEqual("ssh-dss", msg.get_text())
        # can't do the same test as we do for RSA, because DSS signatures
        # are usually different each time. But we can test verification
        # anyway, so it's OK.
self.assertEqual(40, len(msg.get_binary()))
msg.rewind()
pub = DSSKey(data=key.asbytes())
self.assertTrue(pub.verify_ssh_sig(b"ice weasels", msg))
@requires_sha1_signing
def test_generate_rsa(self):
key = RSAKey.generate(1024)
msg = key.sign_ssh_data(b"jerri blank")
msg.rewind()
self.assertTrue(key.verify_ssh_sig(b"jerri blank", msg))
def test_generate_dss(self):
key = DSSKey.generate(1024)
msg = key.sign_ssh_data(b"jerri blank")
msg.rewind()
self.assertTrue(key.verify_ssh_sig(b"jerri blank", msg))
def test_generate_ecdsa(self):
key = ECDSAKey.generate()
msg = key.sign_ssh_data(b"jerri blank")
msg.rewind()
self.assertTrue(key.verify_ssh_sig(b"jerri blank", msg))
self.assertEqual(key.get_bits(), 256)
self.assertEqual(key.get_name(), "ecdsa-sha2-nistp256")
key = ECDSAKey.generate(bits=256)
msg = key.sign_ssh_data(b"jerri blank")
msg.rewind()
self.assertTrue(key.verify_ssh_sig(b"jerri blank", msg))
self.assertEqual(key.get_bits(), 256)
self.assertEqual(key.get_name(), "ecdsa-sha2-nistp256")
key = ECDSAKey.generate(bits=384)
msg = key.sign_ssh_data(b"jerri blank")
msg.rewind()
self.assertTrue(key.verify_ssh_sig(b"jerri blank", msg))
self.assertEqual(key.get_bits(), 384)
self.assertEqual(key.get_name(), "ecdsa-sha2-nistp384")
key = ECDSAKey.generate(bits=521)
msg = key.sign_ssh_data(b"jerri blank")
msg.rewind()
self.assertTrue(key.verify_ssh_sig(b"jerri blank", msg))
self.assertEqual(key.get_bits(), 521)
self.assertEqual(key.get_name(), "ecdsa-sha2-nistp521")
def test_load_ecdsa_256(self):
key = ECDSAKey.from_private_key_file(_support("ecdsa-256.key"))
self.assertEqual("ecdsa-sha2-nistp256", key.get_name())
exp_ecdsa = b(FINGER_ECDSA_256.split()[1].replace(":", ""))
my_ecdsa = hexlify(key.get_fingerprint())
self.assertEqual(exp_ecdsa, my_ecdsa)
self.assertEqual(PUB_ECDSA_256.split()[1], key.get_base64())
self.assertEqual(256, key.get_bits())
s = StringIO()
key.write_private_key(s)
self.assertEqual(ECDSA_PRIVATE_OUT_256, s.getvalue())
s.seek(0)
key2 = ECDSAKey.from_private_key(s)
self.assertEqual(key, key2)
def test_load_ecdsa_password_256(self):
key = ECDSAKey.from_private_key_file(
_support("test_ecdsa_password_256.key"), b"television"
)
self.assertEqual("ecdsa-sha2-nistp256", key.get_name())
exp_ecdsa = b(FINGER_ECDSA_256.split()[1].replace(":", ""))
my_ecdsa = hexlify(key.get_fingerprint())
self.assertEqual(exp_ecdsa, my_ecdsa)
self.assertEqual(PUB_ECDSA_256.split()[1], key.get_base64())
self.assertEqual(256, key.get_bits())
def test_compare_ecdsa_256(self):
# verify that the private & public keys compare equal
key = ECDSAKey.from_private_key_file(_support("ecdsa-256.key"))
self.assertEqual(key, key)
pub = ECDSAKey(data=key.asbytes())
self.assertTrue(key.can_sign())
self.assertTrue(not pub.can_sign())
self.assertEqual(key, pub)
def test_sign_ecdsa_256(self):
        # verify that the ecdsa private key can sign and verify
key = ECDSAKey.from_private_key_file(_support("ecdsa-256.key"))
msg = key.sign_ssh_data(b"ice weasels")
self.assertTrue(type(msg) is Message)
msg.rewind()
self.assertEqual("ecdsa-sha2-nistp256", msg.get_text())
# ECDSA signatures, like DSS signatures, tend to be different
# each time, so we can't compare against a "known correct"
# signature.
# Even the length of the signature can change.
msg.rewind()
pub = ECDSAKey(data=key.asbytes())
self.assertTrue(pub.verify_ssh_sig(b"ice weasels", msg))
def test_load_ecdsa_384(self):
key = ECDSAKey.from_private_key_file(_support("test_ecdsa_384.key"))
self.assertEqual("ecdsa-sha2-nistp384", key.get_name())
exp_ecdsa = b(FINGER_ECDSA_384.split()[1].replace(":", ""))
my_ecdsa = hexlify(key.get_fingerprint())
self.assertEqual(exp_ecdsa, my_ecdsa)
self.assertEqual(PUB_ECDSA_384.split()[1], key.get_base64())
self.assertEqual(384, key.get_bits())
s = StringIO()
key.write_private_key(s)
self.assertEqual(ECDSA_PRIVATE_OUT_384, s.getvalue())
s.seek(0)
key2 = ECDSAKey.from_private_key(s)
self.assertEqual(key, key2)
def test_load_ecdsa_password_384(self):
key = ECDSAKey.from_private_key_file(
_support("test_ecdsa_password_384.key"), b"television"
)
self.assertEqual("ecdsa-sha2-nistp384", key.get_name())
exp_ecdsa = b(FINGER_ECDSA_384.split()[1].replace(":", ""))
my_ecdsa = hexlify(key.get_fingerprint())
self.assertEqual(exp_ecdsa, my_ecdsa)
self.assertEqual(PUB_ECDSA_384.split()[1], key.get_base64())
self.assertEqual(384, key.get_bits())
def test_load_ecdsa_transmutes_crypto_exceptions(self):
path = _support("ecdsa-256.key")
# TODO: nix unittest for pytest
for exception in (TypeError("onoz"), UnsupportedAlgorithm("oops")):
with patch(
"paramiko.ecdsakey.serialization.load_der_private_key"
) as loader:
loader.side_effect = exception
with pytest.raises(SSHException, match=str(exception)):
ECDSAKey.from_private_key_file(path)
def test_compare_ecdsa_384(self):
# verify that the private & public keys compare equal
key = ECDSAKey.from_private_key_file(_support("test_ecdsa_384.key"))
self.assertEqual(key, key)
pub = ECDSAKey(data=key.asbytes())
self.assertTrue(key.can_sign())
self.assertTrue(not pub.can_sign())
self.assertEqual(key, pub)
def test_sign_ecdsa_384(self):
        # verify that the ecdsa private key can sign and verify
key = ECDSAKey.from_private_key_file(_support("test_ecdsa_384.key"))
msg = key.sign_ssh_data(b"ice weasels")
self.assertTrue(type(msg) is Message)
msg.rewind()
self.assertEqual("ecdsa-sha2-nistp384", msg.get_text())
# ECDSA signatures, like DSS signatures, tend to be different
# each time, so we can't compare against a "known correct"
# signature.
# Even the length of the signature can change.
msg.rewind()
pub = ECDSAKey(data=key.asbytes())
self.assertTrue(pub.verify_ssh_sig(b"ice weasels", msg))
def test_load_ecdsa_521(self):
key = ECDSAKey.from_private_key_file(_support("test_ecdsa_521.key"))
self.assertEqual("ecdsa-sha2-nistp521", key.get_name())
exp_ecdsa = b(FINGER_ECDSA_521.split()[1].replace(":", ""))
my_ecdsa = hexlify(key.get_fingerprint())
self.assertEqual(exp_ecdsa, my_ecdsa)
self.assertEqual(PUB_ECDSA_521.split()[1], key.get_base64())
self.assertEqual(521, key.get_bits())
s = StringIO()
key.write_private_key(s)
# Different versions of OpenSSL (SSLeay versions 0x1000100f and
# 0x1000207f for instance) use different apparently valid (as far as
# ssh-keygen is concerned) padding. So we can't check the actual value
# of the pem encoded key.
s.seek(0)
key2 = ECDSAKey.from_private_key(s)
self.assertEqual(key, key2)
def test_load_ecdsa_password_521(self):
key = ECDSAKey.from_private_key_file(
_support("test_ecdsa_password_521.key"), b"television"
)
self.assertEqual("ecdsa-sha2-nistp521", key.get_name())
exp_ecdsa = b(FINGER_ECDSA_521.split()[1].replace(":", ""))
my_ecdsa = hexlify(key.get_fingerprint())
self.assertEqual(exp_ecdsa, my_ecdsa)
self.assertEqual(PUB_ECDSA_521.split()[1], key.get_base64())
self.assertEqual(521, key.get_bits())
def test_compare_ecdsa_521(self):
# verify that the private & public keys compare equal
key = ECDSAKey.from_private_key_file(_support("test_ecdsa_521.key"))
self.assertEqual(key, key)
pub = ECDSAKey(data=key.asbytes())
self.assertTrue(key.can_sign())
self.assertTrue(not pub.can_sign())
self.assertEqual(key, pub)
def test_sign_ecdsa_521(self):
        # verify that the ecdsa private key can sign and verify
key = ECDSAKey.from_private_key_file(_support("test_ecdsa_521.key"))
msg = key.sign_ssh_data(b"ice weasels")
self.assertTrue(type(msg) is Message)
msg.rewind()
self.assertEqual("ecdsa-sha2-nistp521", msg.get_text())
# ECDSA signatures, like DSS signatures, tend to be different
# each time, so we can't compare against a "known correct"
# signature.
# Even the length of the signature can change.
msg.rewind()
pub = ECDSAKey(data=key.asbytes())
self.assertTrue(pub.verify_ssh_sig(b"ice weasels", msg))
def test_load_openssh_format_RSA_key(self):
key = RSAKey.from_private_key_file(
_support("test_rsa_openssh.key"), b"television"
)
self.assertEqual("ssh-rsa", key.get_name())
self.assertEqual(PUB_RSA_2K_OPENSSH.split()[1], key.get_base64())
self.assertEqual(2048, key.get_bits())
exp_rsa = b(FINGER_RSA_2K_OPENSSH.split()[1].replace(":", ""))
my_rsa = hexlify(key.get_fingerprint())
self.assertEqual(exp_rsa, my_rsa)
def test_loading_openssh_RSA_keys_uses_correct_p_q(self):
# Re #1723 - not the most elegant test but given how deep it is...
with patch(
"paramiko.rsakey.rsa.RSAPrivateNumbers", wraps=RSAPrivateNumbers
) as spy:
# Load key
RSAKey.from_private_key_file(
_support("test_rsa_openssh.key"), b"television"
)
# Ensure spy saw the correct P and Q values as derived from
# hardcoded test private key value
kwargs = spy.call_args[1]
assert kwargs["p"] == RSA_2K_OPENSSH_P
assert kwargs["q"] == RSA_2K_OPENSSH_Q
def test_load_openssh_format_DSS_key(self):
key = DSSKey.from_private_key_file(
_support("test_dss_openssh.key"), b"television"
)
self.assertEqual("ssh-dss", key.get_name())
self.assertEqual(PUB_DSS_1K_OPENSSH.split()[1], key.get_base64())
self.assertEqual(1024, key.get_bits())
exp_rsa = b(FINGER_DSS_1K_OPENSSH.split()[1].replace(":", ""))
my_rsa = hexlify(key.get_fingerprint())
self.assertEqual(exp_rsa, my_rsa)
def test_load_openssh_format_EC_key(self):
key = ECDSAKey.from_private_key_file(
_support("test_ecdsa_384_openssh.key"), b"television"
)
self.assertEqual("ecdsa-sha2-nistp384", key.get_name())
self.assertEqual(PUB_EC_384_OPENSSH.split()[1], key.get_base64())
self.assertEqual(384, key.get_bits())
exp_fp = b(FINGER_EC_384_OPENSSH.split()[1].replace(":", ""))
my_fp = hexlify(key.get_fingerprint())
self.assertEqual(exp_fp, my_fp)
def test_salt_size(self):
# Read an existing encrypted private key
file_ = _support("test_rsa_password.key")
password = "television"
newfile = file_ + ".new"
newpassword = "radio"
key = RSAKey(filename=file_, password=password)
# Write out a newly re-encrypted copy with a new password.
        # When the bug under test exists, this will raise a ValueError.
try:
key.write_private_key_file(newfile, password=newpassword)
self.assert_keyfile_is_encrypted(newfile)
# Verify the inner key data still matches (when no ValueError)
key2 = RSAKey(filename=newfile, password=newpassword)
self.assertEqual(key, key2)
finally:
os.remove(newfile)
def test_load_openssh_format_RSA_nopad(self):
# check just not exploding with 'Invalid key'
RSAKey.from_private_key_file(_support("test_rsa_openssh_nopad.key"))
def test_stringification(self):
key = RSAKey.from_private_key_file(_support("rsa.key"))
comparable = TEST_KEY_BYTESTR
self.assertEqual(str(key), comparable)
def test_ed25519(self):
key1 = Ed25519Key.from_private_key_file(_support("ed25519.key"))
key2 = Ed25519Key.from_private_key_file(
_support("test_ed25519_password.key"), b"abc123"
)
self.assertNotEqual(key1.asbytes(), key2.asbytes())
def test_ed25519_funky_padding(self):
# Proves #1306 by just not exploding with 'Invalid key'.
Ed25519Key.from_private_key_file(
_support("test_ed25519-funky-padding.key")
)
def test_ed25519_funky_padding_with_passphrase(self):
# Proves #1306 by just not exploding with 'Invalid key'.
Ed25519Key.from_private_key_file(
_support("test_ed25519-funky-padding_password.key"), b"asdf"
)
def test_ed25519_compare(self):
# verify that the private & public keys compare equal
key = Ed25519Key.from_private_key_file(_support("ed25519.key"))
self.assertEqual(key, key)
pub = Ed25519Key(data=key.asbytes())
self.assertTrue(key.can_sign())
self.assertTrue(not pub.can_sign())
self.assertEqual(key, pub)
# No point testing on systems that never exhibited the bug originally
@pytest.mark.skipif(
not is_low_entropy(), reason="Not a low-entropy system"
)
def test_ed25519_32bit_collision(self):
# Re: 2021.10.19 security report email: two different private keys
# which Paramiko compared as equal on low-entropy platforms.
original = Ed25519Key.from_private_key_file(
_support("badhash_key1.ed25519.key")
)
generated = Ed25519Key.from_private_key_file(
_support("badhash_key2.ed25519.key")
)
assert original != generated
def test_ed25519_nonbytes_password(self):
# https://github.com/paramiko/paramiko/issues/1039
Ed25519Key.from_private_key_file(
_support("test_ed25519_password.key"),
# NOTE: not a bytes. Amusingly, the test above for same key DOES
# explicitly cast to bytes...code smell!
"abc123",
)
# No exception -> it's good. Meh.
def test_ed25519_load_from_file_obj(self):
with open(_support("ed25519.key")) as pkey_fileobj:
key = Ed25519Key.from_private_key(pkey_fileobj)
self.assertEqual(key, key)
self.assertTrue(key.can_sign())
def test_keyfile_is_actually_encrypted(self):
# Read an existing encrypted private key
file_ = _support("test_rsa_password.key")
password = "television"
newfile = file_ + ".new"
newpassword = "radio"
key = RSAKey(filename=file_, password=password)
# Write out a newly re-encrypted copy with a new password.
        # When the bug under test exists, this will raise a ValueError.
try:
key.write_private_key_file(newfile, password=newpassword)
self.assert_keyfile_is_encrypted(newfile)
finally:
os.remove(newfile)
@patch("paramiko.pkey.os")
def _test_keyfile_race(self, os_, exists):
# Re: CVE-2022-24302
password = "television"
newpassword = "radio"
source = _support("test_ecdsa_384.key")
new = source + ".new"
# Mock setup
os_.path.exists.return_value = exists
# Attach os flag values to mock
for attr, value in vars(os).items():
if attr.startswith("O_"):
setattr(os_, attr, value)
# Load fixture key
key = ECDSAKey(filename=source, password=password)
key._write_private_key = Mock()
# Write out in new location
key.write_private_key_file(new, password=newpassword)
# Expected open via os module
os_.open.assert_called_once_with(
new, flags=os.O_WRONLY | os.O_CREAT | os.O_TRUNC, mode=o600
)
os_.fdopen.assert_called_once_with(os_.open.return_value, "w")
assert (
key._write_private_key.call_args[0][0]
== os_.fdopen.return_value.__enter__.return_value
)
def test_new_keyfiles_avoid_file_descriptor_race_on_chmod(self):
self._test_keyfile_race(exists=False)
def test_existing_keyfiles_still_work_ok(self):
self._test_keyfile_race(exists=True)
def test_new_keyfiles_avoid_descriptor_race_integration(self):
# Integration-style version of above
password = "television"
newpassword = "radio"
source = _support("test_ecdsa_384.key")
new = source + ".new"
# Load fixture key
key = ECDSAKey(filename=source, password=password)
try:
# Write out in new location
key.write_private_key_file(new, password=newpassword)
# Test mode
assert stat.S_IMODE(os.stat(new).st_mode) == o600
# Prove can open with new password
reloaded = ECDSAKey(filename=new, password=newpassword)
assert reloaded == key
finally:
if os.path.exists(new):
os.unlink(new)
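# A hedged sketch (not part of the suite): the write/reload round-trip that
# several tests above perform, using a freshly generated key instead of a
# fixture. The path and passphrase are hypothetical placeholders.
def _demo_key_roundtrip(path="/tmp/demo_ecdsa.key"):
    key = ECDSAKey.generate(bits=256)
    key.write_private_key_file(path, password="sekrit")
    try:
        # Reloading with the same passphrase must yield an equal key
        reloaded = ECDSAKey(filename=path, password="sekrit")
        assert reloaded == key
    finally:
        os.remove(path)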
# paramiko_paramiko/tests/test_file.py
# Copyright (C) 2003-2009 Robey Pointer <[email protected]>
#
# This file is part of paramiko.
#
# Paramiko is free software; you can redistribute it and/or modify it under the
# terms of the GNU Lesser General Public License as published by the Free
# Software Foundation; either version 2.1 of the License, or (at your option)
# any later version.
#
# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Paramiko; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
"""
Some unit tests for the BufferedFile abstraction.
"""
import unittest
from io import BytesIO
from paramiko.common import linefeed_byte, crlf, cr_byte
from paramiko.file import BufferedFile
from ._util import needs_builtin
class LoopbackFile(BufferedFile):
"""
BufferedFile object that you can write data into, and then read it back.
"""
def __init__(self, mode="r", bufsize=-1):
BufferedFile.__init__(self)
self._set_mode(mode, bufsize)
self.buffer = BytesIO()
self.offset = 0
def _read(self, size):
data = self.buffer.getvalue()[self.offset : self.offset + size]
self.offset += len(data)
return data
def _write(self, data):
self.buffer.write(data)
return len(data)
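# A hedged sketch (illustrative, not a test): with the default unbuffered
# mode, bytes written through the BufferedFile layer are immediately readable
# back out of the same LoopbackFile.
def _demo_loopback_roundtrip():
    f = LoopbackFile("r+")
    f.write(b"ping\n")
    assert f.readline() == "ping\n"
    f.close()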
class BufferedFileTest(unittest.TestCase):
    def test_simple(self):
        f = LoopbackFile("r")
        # writing to a read-only file should raise an exception
        self.assertRaises(IOError, f.write, b"hi")
        f.close()
        f = LoopbackFile("w")
        # reading from a write-only file should raise an exception
        self.assertRaises(IOError, f.read, 1)
        f.close()
def test_readline(self):
f = LoopbackFile("r+U")
f.write(
b"First line.\nSecond line.\r\nThird line.\n"
+ b"Fourth line.\nFinal line non-terminated."
)
self.assertEqual(f.readline(), "First line.\n")
# universal newline mode should convert this linefeed:
self.assertEqual(f.readline(), "Second line.\n")
# truncated line:
self.assertEqual(f.readline(7), "Third l")
self.assertEqual(f.readline(), "ine.\n")
# newline should be detected and only the fourth line returned
self.assertEqual(f.readline(39), "Fourth line.\n")
self.assertEqual(f.readline(), "Final line non-terminated.")
self.assertEqual(f.readline(), "")
f.close()
        # readline on a closed file should raise IOError
        self.assertRaises(IOError, f.readline)
self.assertTrue(linefeed_byte in f.newlines)
self.assertTrue(crlf in f.newlines)
self.assertTrue(cr_byte not in f.newlines)
def test_lf(self):
"""
try to trick the linefeed detector.
"""
f = LoopbackFile("r+U")
f.write(b"First line.\r")
self.assertEqual(f.readline(), "First line.\n")
f.write(b"\nSecond.\r\n")
self.assertEqual(f.readline(), "Second.\n")
f.close()
self.assertEqual(f.newlines, crlf)
def test_write(self):
"""
verify that write buffering is on.
"""
f = LoopbackFile("r+", 1)
f.write(b"Complete line.\nIncomplete line.")
self.assertEqual(f.readline(), "Complete line.\n")
self.assertEqual(f.readline(), "")
f.write("..\n")
self.assertEqual(f.readline(), "Incomplete line...\n")
f.close()
def test_flush(self):
"""
verify that flush will force a write.
"""
f = LoopbackFile("r+", 512)
f.write("Not\nquite\n512 bytes.\n")
self.assertEqual(f.read(1), b"")
f.flush()
self.assertEqual(f.read(6), b"Not\nqu")
self.assertEqual(f.read(4), b"ite\n")
self.assertEqual(f.read(5), b"512 b")
self.assertEqual(f.read(9), b"ytes.\n")
self.assertEqual(f.read(3), b"")
f.close()
def test_buffering_flushes(self):
"""
verify that flushing happens automatically on buffer crossing.
"""
f = LoopbackFile("r+", 16)
f.write(b"Too small.")
self.assertEqual(f.read(4), b"")
f.write(b" ")
self.assertEqual(f.read(4), b"")
f.write(b"Enough.")
self.assertEqual(f.read(20), b"Too small. Enough.")
f.close()
def test_read_all(self):
"""
verify that read(-1) returns everything left in the file.
"""
f = LoopbackFile("r+", 16)
f.write(b"The first thing you need to do is open your eyes. ")
f.write(b"Then, you need to close them again.\n")
s = f.read(-1)
self.assertEqual(
s,
b"The first thing you need to do is open your eyes. Then, you "
+ b"need to close them again.\n",
)
f.close()
def test_readable(self):
f = LoopbackFile("r")
self.assertTrue(f.readable())
self.assertFalse(f.writable())
self.assertFalse(f.seekable())
f.close()
def test_writable(self):
f = LoopbackFile("w")
self.assertTrue(f.writable())
self.assertFalse(f.readable())
self.assertFalse(f.seekable())
f.close()
def test_readinto(self):
data = bytearray(5)
f = LoopbackFile("r+")
f._write(b"hello")
f.readinto(data)
self.assertEqual(data, b"hello")
f.close()
def test_write_bad_type(self):
with LoopbackFile("wb") as f:
self.assertRaises(TypeError, f.write, object())
def test_write_unicode_as_binary(self):
text = "\xa7 why is writing text to a binary file allowed?\n"
with LoopbackFile("rb+") as f:
f.write(text)
self.assertEqual(f.read(), text.encode("utf-8"))
@needs_builtin("memoryview")
def test_write_bytearray(self):
with LoopbackFile("rb+") as f:
f.write(bytearray(12))
self.assertEqual(f.read(), 12 * b"\0")
@needs_builtin("buffer")
def test_write_buffer(self):
data = 3 * b"pretend giant block of data\n"
offsets = range(0, len(data), 8)
with LoopbackFile("rb+") as f:
for offset in offsets:
f.write(buffer(data, offset, 8)) # noqa
self.assertEqual(f.read(), data)
@needs_builtin("memoryview")
def test_write_memoryview(self):
data = 3 * b"pretend giant block of data\n"
offsets = range(0, len(data), 8)
with LoopbackFile("rb+") as f:
view = memoryview(data)
for offset in offsets:
f.write(view[offset : offset + 8])
self.assertEqual(f.read(), data)
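# A hedged sketch (illustrative, not a test): how the bufsize argument
# changes when writes become visible to the reading side, mirroring
# test_flush/test_buffering_flushes above.
def _demo_buffering():
    f = LoopbackFile("r+", 16)  # 16-byte write buffer
    f.write(b"tiny")            # fits within the buffer...
    assert f.read(4) == b""     # ...so nothing is readable yet
    f.flush()                   # force the buffered bytes out
    assert f.read(4) == b"tiny"
    f.close()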
if __name__ == "__main__":
from unittest import main
main()
# paramiko_paramiko/tests/_util.py
from contextlib import contextmanager
from os.path import dirname, realpath, join
import builtins
import os
from pathlib import Path
import socket
import struct
import sys
import unittest
import time
import threading
import pytest
from paramiko import (
ServerInterface,
RSAKey,
DSSKey,
AUTH_FAILED,
AUTH_PARTIALLY_SUCCESSFUL,
AUTH_SUCCESSFUL,
OPEN_SUCCEEDED,
OPEN_FAILED_ADMINISTRATIVELY_PROHIBITED,
InteractiveQuery,
Transport,
)
from paramiko.ssh_gss import GSS_AUTH_AVAILABLE
from cryptography.exceptions import UnsupportedAlgorithm, _Reasons
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.asymmetric import padding, rsa
tests_dir = dirname(realpath(__file__))
from ._loop import LoopSocket
def _support(filename):
base = Path(tests_dir)
top = base / filename
deeper = base / "_support" / filename
return str(deeper if deeper.exists() else top)
def _config(name):
return join(tests_dir, "configs", name)
needs_gssapi = pytest.mark.skipif(
not GSS_AUTH_AVAILABLE, reason="No GSSAPI to test"
)
def needs_builtin(name):
"""
Skip decorated test if builtin name does not exist.
"""
reason = "Test requires a builtin '{}'".format(name)
return pytest.mark.skipif(not hasattr(builtins, name), reason=reason)
slow = pytest.mark.slow
# GSSAPI / Kerberos related tests need a working Kerberos environment.
# The class `KerberosTestCase` provides such an environment or skips all tests.
# There are 3 distinct cases:
#
# - A Kerberos environment has already been created and the environment
# contains the required information.
#
# - We can use the package 'k5test' to set up a working Kerberos environment on
# the fly.
#
# - We skip all tests.
#
# ToDo: add a Windows specific implementation?
if (
os.environ.get("K5TEST_USER_PRINC", None)
and os.environ.get("K5TEST_HOSTNAME", None)
and os.environ.get("KRB5_KTNAME", None)
): # add other vars as needed
# The environment provides the required information
class DummyK5Realm:
def __init__(self):
for k in os.environ:
if not k.startswith("K5TEST_"):
continue
setattr(self, k[7:].lower(), os.environ[k])
self.env = {}
class KerberosTestCase(unittest.TestCase):
@classmethod
def setUpClass(cls):
cls.realm = DummyK5Realm()
@classmethod
def tearDownClass(cls):
del cls.realm
else:
try:
        # Try to set up a Kerberos environment
from k5test import KerberosTestCase
except Exception:
# Use a dummy, that skips all tests
class KerberosTestCase(unittest.TestCase):
@classmethod
def setUpClass(cls):
raise unittest.SkipTest(
"Missing extension package k5test. "
'Please run "pip install k5test" '
"to install it."
)
def update_env(testcase, mapping, env=os.environ):
"""Modify os.environ during a test case and restore during cleanup."""
saved_env = env.copy()
def replace(target, source):
target.update(source)
for k in list(target):
if k not in source:
target.pop(k, None)
testcase.addCleanup(replace, env, saved_env)
env.update(mapping)
return testcase
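# A hedged usage sketch of update_env (illustrative): inside a TestCase,
#
#     update_env(self, {"KRB5_KTNAME": "/tmp/fake.keytab"})
#
# mutates os.environ for the rest of the test and registers a cleanup that
# restores the original mapping. The variable name/value are hypothetical.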
def k5shell(args=None):
"""Create a shell with an kerberos environment
This can be used to debug paramiko or to test the old GSSAPI.
To test a different GSSAPI, simply activate a suitable venv
within the shell.
"""
import k5test
import atexit
import subprocess
k5 = k5test.K5Realm()
atexit.register(k5.stop)
os.environ.update(k5.env)
for n in ("realm", "user_princ", "hostname"):
os.environ["K5TEST_" + n.upper()] = getattr(k5, n)
if not args:
args = sys.argv[1:]
if not args:
args = [os.environ.get("SHELL", "bash")]
sys.exit(subprocess.call(args))
def is_low_entropy():
"""
    Attempts to detect whether the running interpreter is low-entropy.
"low-entropy" is defined as being in 32-bit mode and with the hash seed set
to zero.
"""
is_32bit = struct.calcsize("P") == 32 / 8
# I don't see a way to tell internally if the hash seed was set this
# way, but env should be plenty sufficient, this is only for testing.
return is_32bit and os.environ.get("PYTHONHASHSEED", None) == "0"
def sha1_signing_unsupported():
"""
This is used to skip tests in environments where SHA-1 signing is
not supported by the backend.
"""
private_key = rsa.generate_private_key(
public_exponent=65537, key_size=2048, backend=default_backend()
)
message = b"Some dummy text"
try:
private_key.sign(
message,
padding.PSS(
mgf=padding.MGF1(hashes.SHA1()),
salt_length=padding.PSS.MAX_LENGTH,
),
hashes.SHA1(),
)
return False
except UnsupportedAlgorithm as e:
return e._reason == _Reasons.UNSUPPORTED_HASH
requires_sha1_signing = unittest.skipIf(
sha1_signing_unsupported(), "SHA-1 signing not supported"
)
_disable_sha2 = dict(
disabled_algorithms=dict(keys=["rsa-sha2-256", "rsa-sha2-512"])
)
_disable_sha1 = dict(disabled_algorithms=dict(keys=["ssh-rsa"]))
_disable_sha2_pubkey = dict(
disabled_algorithms=dict(pubkeys=["rsa-sha2-256", "rsa-sha2-512"])
)
_disable_sha1_pubkey = dict(disabled_algorithms=dict(pubkeys=["ssh-rsa"]))
unicodey = "\u2022"
class TestServer(ServerInterface):
paranoid_did_password = False
paranoid_did_public_key = False
# TODO: make this ed25519 or something else modern? (_is_ this used??)
paranoid_key = DSSKey.from_private_key_file(_support("dss.key"))
def __init__(self, allowed_keys=None):
self.allowed_keys = allowed_keys if allowed_keys is not None else []
def check_channel_request(self, kind, chanid):
if kind == "bogus":
return OPEN_FAILED_ADMINISTRATIVELY_PROHIBITED
return OPEN_SUCCEEDED
def check_channel_exec_request(self, channel, command):
if command != b"yes":
return False
return True
def check_channel_shell_request(self, channel):
return True
def check_global_request(self, kind, msg):
self._global_request = kind
# NOTE: for w/e reason, older impl of this returned False always, even
# tho that's only supposed to occur if the request cannot be served.
# For now, leaving that the default unless test supplies specific
# 'acceptable' request kind
return kind == "acceptable"
def check_channel_x11_request(
self,
channel,
single_connection,
auth_protocol,
auth_cookie,
screen_number,
):
self._x11_single_connection = single_connection
self._x11_auth_protocol = auth_protocol
self._x11_auth_cookie = auth_cookie
self._x11_screen_number = screen_number
return True
def check_port_forward_request(self, addr, port):
self._listen = socket.socket()
self._listen.bind(("127.0.0.1", 0))
self._listen.listen(1)
return self._listen.getsockname()[1]
def cancel_port_forward_request(self, addr, port):
self._listen.close()
self._listen = None
def check_channel_direct_tcpip_request(self, chanid, origin, destination):
self._tcpip_dest = destination
return OPEN_SUCCEEDED
def get_allowed_auths(self, username):
if username == "slowdive":
return "publickey,password"
if username == "paranoid":
if (
not self.paranoid_did_password
and not self.paranoid_did_public_key
):
return "publickey,password"
elif self.paranoid_did_password:
return "publickey"
else:
return "password"
if username == "commie":
return "keyboard-interactive"
if username == "utf8":
return "password"
if username == "non-utf8":
return "password"
return "publickey"
def check_auth_password(self, username, password):
if (username == "slowdive") and (password == "pygmalion"):
return AUTH_SUCCESSFUL
if (username == "paranoid") and (password == "paranoid"):
# 2-part auth (even openssh doesn't support this)
self.paranoid_did_password = True
if self.paranoid_did_public_key:
return AUTH_SUCCESSFUL
return AUTH_PARTIALLY_SUCCESSFUL
if (username == "utf8") and (password == unicodey):
return AUTH_SUCCESSFUL
if (username == "non-utf8") and (password == "\xff"):
return AUTH_SUCCESSFUL
if username == "bad-server":
raise Exception("Ack!")
if username == "unresponsive-server":
time.sleep(5)
return AUTH_SUCCESSFUL
return AUTH_FAILED
def check_auth_publickey(self, username, key):
if (username == "paranoid") and (key == self.paranoid_key):
# 2-part auth
self.paranoid_did_public_key = True
if self.paranoid_did_password:
return AUTH_SUCCESSFUL
return AUTH_PARTIALLY_SUCCESSFUL
# TODO: make sure all tests incidentally using this to pass, _without
# sending a username oops_, get updated somehow - probably via server()
# default always injecting a username
elif key in self.allowed_keys:
return AUTH_SUCCESSFUL
return AUTH_FAILED
def check_auth_interactive(self, username, submethods):
if username == "commie":
self.username = username
return InteractiveQuery(
"password", "Please enter a password.", ("Password", False)
)
return AUTH_FAILED
def check_auth_interactive_response(self, responses):
if self.username == "commie":
if (len(responses) == 1) and (responses[0] == "cat"):
return AUTH_SUCCESSFUL
return AUTH_FAILED
@contextmanager
def server(
hostkey=None,
init=None,
server_init=None,
client_init=None,
connect=None,
pubkeys=None,
catch_error=False,
transport_factory=None,
server_transport_factory=None,
defer=False,
skip_verify=False,
):
"""
SSH server contextmanager for testing.
Yields a tuple of ``(tc, ts)`` (client- and server-side `Transport`
objects), or ``(tc, ts, err)`` when ``catch_error==True``.
:param hostkey:
Host key to use for the server; if None, loads
``rsa.key``.
:param init:
Default `Transport` constructor kwargs to use for both sides.
:param server_init:
Extends and/or overrides ``init`` for server transport only.
:param client_init:
Extends and/or overrides ``init`` for client transport only.
:param connect:
Kwargs to use for ``connect()`` on the client.
:param pubkeys:
List of public keys for auth.
:param catch_error:
Whether to capture connection errors & yield from contextmanager.
Necessary for connection_time exception testing.
:param transport_factory:
Like the same-named param in SSHClient: which Transport class to use.
:param server_transport_factory:
Like ``transport_factory``, but only impacts the server transport.
:param bool defer:
Whether to defer authentication during connecting.
This is really just shorthand for ``connect={}`` which would do roughly
the same thing. Also: this implies skip_verify=True automatically!
:param bool skip_verify:
Whether NOT to do the default "make sure auth passed" check.
"""
if init is None:
init = {}
if server_init is None:
server_init = {}
if client_init is None:
client_init = {}
if connect is None:
# No auth at all please
if defer:
connect = dict()
# Default username based auth
else:
connect = dict(username="slowdive", password="pygmalion")
socks = LoopSocket()
sockc = LoopSocket()
sockc.link(socks)
if transport_factory is None:
transport_factory = Transport
if server_transport_factory is None:
server_transport_factory = transport_factory
tc = transport_factory(sockc, **dict(init, **client_init))
ts = server_transport_factory(socks, **dict(init, **server_init))
if hostkey is None:
hostkey = RSAKey.from_private_key_file(_support("rsa.key"))
ts.add_server_key(hostkey)
event = threading.Event()
server = TestServer(allowed_keys=pubkeys)
assert not event.is_set()
assert not ts.is_active()
assert tc.get_username() is None
assert ts.get_username() is None
assert not tc.is_authenticated()
assert not ts.is_authenticated()
err = None
# Trap errors and yield instead of raising right away; otherwise callers
# cannot usefully deal with problems at connect time which stem from errors
# in the server side.
try:
ts.start_server(event, server)
tc.connect(**connect)
event.wait(1.0)
assert event.is_set()
assert ts.is_active()
assert tc.is_active()
except Exception as e:
if not catch_error:
raise
err = e
yield (tc, ts, err) if catch_error else (tc, ts)
if not (catch_error or skip_verify or defer):
assert ts.is_authenticated()
assert tc.is_authenticated()
tc.close()
ts.close()
socks.close()
sockc.close()
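# A hedged sketch (illustrative, not a test) of typical use of the server()
# contextmanager above: enter it, then poke at the yielded client/server
# Transport pair.
def _demo_server_usage():
    with server() as (tc, ts):
        chan = tc.open_session()
        chan.exec_command("yes")  # TestServer only accepts the command "yes"
        chan.close()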
def wait_until(condition, *, timeout=2):
"""
Wait until `condition()` no longer raises an `AssertionError` or until
`timeout` seconds have passed, which causes a `TimeoutError` to be raised.
"""
deadline = time.time() + timeout
while True:
try:
condition()
except AssertionError as e:
if time.time() > deadline:
timeout_message = f"Condition not reached after {timeout}s"
raise TimeoutError(timeout_message) from e
else:
return
time.sleep(0.01)
# paramiko_paramiko/tests/__init__.py
"""Base classes and helpers for testing paramiko."""
import functools
import locale
import os
from pytest import skip
# List of locales which have non-ascii characters in all categories.
# Omits most European languages which for instance may have only some months
# with names that include accented characters.
_non_ascii_locales = [
# East Asian locales
"ja_JP",
"ko_KR",
"zh_CN",
"zh_TW",
# European locales with non-latin alphabets
"el_GR",
"ru_RU",
"uk_UA",
]
# Also include UTF-8 versions of these locales
_non_ascii_locales.extend([name + ".utf8" for name in _non_ascii_locales])
def requireNonAsciiLocale(category_name="LC_ALL"):
"""Run decorated test under a non-ascii locale or skip if not possible."""
if os.name != "posix":
return skip("Non-posix OSes don't really use C locales")
cat = getattr(locale, category_name)
return functools.partial(_decorate_with_locale, cat, _non_ascii_locales)
def _decorate_with_locale(category, try_locales, test_method):
"""Decorate test_method to run after switching to a different locale."""
def _test_under_locale(testself, *args, **kwargs):
original = locale.setlocale(category)
while try_locales:
try:
locale.setlocale(category, try_locales[0])
except locale.Error:
# Mutating original list is ok, setlocale would keep failing
try_locales.pop(0)
else:
try:
return test_method(testself, *args, **kwargs)
finally:
locale.setlocale(category, original)
# No locales could be used? Just skip the decorated test :(
skip("No usable locales installed")
functools.update_wrapper(_test_under_locale, test_method)
return _test_under_locale
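# A hedged usage sketch (illustrative): the factory is called, then applied
# to a test method; the wrapped test runs under the first non-ascii locale
# that can be installed. Class/method names here are hypothetical.
#
#     class ErrorMessageTest(unittest.TestCase):
#         @requireNonAsciiLocale("LC_MESSAGES")
#         def test_strerror_is_decoded(self):
#             ...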
# paramiko_paramiko/tests/test_proxy.py
import signal
import socket
from unittest.mock import patch
from pytest import raises
from paramiko import ProxyCommand, ProxyCommandFailure
class TestProxyCommand:
@patch("paramiko.proxy.subprocess")
def test_init_takes_command_string(self, subprocess):
ProxyCommand(command_line="do a thing")
subprocess.Popen.assert_called_once_with(
["do", "a", "thing"],
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
bufsize=0,
)
@patch("paramiko.proxy.subprocess.Popen")
def test_send_writes_to_process_stdin_returning_length(self, Popen):
proxy = ProxyCommand("hi")
written = proxy.send(b"data")
Popen.return_value.stdin.write.assert_called_once_with(b"data")
assert written == len(b"data")
@patch("paramiko.proxy.subprocess.Popen")
def test_send_raises_ProxyCommandFailure_on_error(self, Popen):
Popen.return_value.stdin.write.side_effect = IOError(0, "whoops")
with raises(ProxyCommandFailure) as info:
ProxyCommand("hi").send("data")
assert info.value.command == "hi"
assert info.value.error == "whoops"
@patch("paramiko.proxy.subprocess.Popen")
@patch("paramiko.proxy.os.read")
@patch("paramiko.proxy.select")
def test_recv_reads_from_process_stdout_returning_bytes(
self, select, os_read, Popen
):
stdout = Popen.return_value.stdout
select.return_value = [stdout], None, None
fileno = stdout.fileno.return_value
# Force os.read to return smaller-than-requested chunks
os_read.side_effect = [b"was", b"t", b"e", b"of ti", b"me"]
proxy = ProxyCommand("hi")
# Ask for 5 bytes (ie b"waste")
data = proxy.recv(5)
# Ensure we got "waste" stitched together
assert data == b"waste"
# Ensure the calls happened in the sizes expected (starting with the
# initial "I want all 5 bytes", followed by "I want whatever I believe
# should be left after what I've already read", until done)
assert [x[0] for x in os_read.call_args_list] == [
(fileno, 5), # initial
(fileno, 2), # I got 3, want 2 more
(fileno, 1), # I've now got 4, want 1 more
]
@patch("paramiko.proxy.subprocess.Popen")
@patch("paramiko.proxy.os.read")
@patch("paramiko.proxy.select")
def test_recv_returns_buffer_on_timeout_if_any_read(
self, select, os_read, Popen
):
stdout = Popen.return_value.stdout
select.return_value = [stdout], None, None
fileno = stdout.fileno.return_value
os_read.side_effect = [b"was", socket.timeout]
proxy = ProxyCommand("hi")
data = proxy.recv(5)
assert data == b"was" # not b"waste"
assert os_read.call_args[0] == (fileno, 2)
@patch("paramiko.proxy.subprocess.Popen")
@patch("paramiko.proxy.os.read")
@patch("paramiko.proxy.select")
def test_recv_raises_timeout_if_nothing_read(self, select, os_read, Popen):
stdout = Popen.return_value.stdout
select.return_value = [stdout], None, None
fileno = stdout.fileno.return_value
os_read.side_effect = socket.timeout
proxy = ProxyCommand("hi")
with raises(socket.timeout):
proxy.recv(5)
assert os_read.call_args[0] == (fileno, 5)
@patch("paramiko.proxy.subprocess.Popen")
@patch("paramiko.proxy.os.read")
@patch("paramiko.proxy.select")
def test_recv_raises_ProxyCommandFailure_on_non_timeout_error(
self, select, os_read, Popen
):
select.return_value = [Popen.return_value.stdout], None, None
os_read.side_effect = IOError(0, "whoops")
with raises(ProxyCommandFailure) as info:
ProxyCommand("hi").recv(5)
assert info.value.command == "hi"
assert info.value.error == "whoops"
@patch("paramiko.proxy.subprocess.Popen")
@patch("paramiko.proxy.os.kill")
def test_close_kills_subprocess(self, os_kill, Popen):
proxy = ProxyCommand("hi")
proxy.close()
os_kill.assert_called_once_with(Popen.return_value.pid, signal.SIGTERM)
@patch("paramiko.proxy.subprocess.Popen")
def test_closed_exposes_whether_subprocess_has_exited(self, Popen):
proxy = ProxyCommand("hi")
Popen.return_value.returncode = None
assert proxy.closed is False
assert proxy._closed is False
Popen.return_value.returncode = 0
assert proxy.closed is True
assert proxy._closed is True
@patch("paramiko.proxy.time.time")
@patch("paramiko.proxy.subprocess.Popen")
@patch("paramiko.proxy.os.read")
@patch("paramiko.proxy.select")
def test_timeout_affects_whether_timeout_is_raised(
self, select, os_read, Popen, time
):
stdout = Popen.return_value.stdout
select.return_value = [stdout], None, None
# Base case: None timeout means no timing out
os_read.return_value = b"meh"
proxy = ProxyCommand("hello")
assert proxy.timeout is None
# Implicit 'no raise' check
assert proxy.recv(3) == b"meh"
# Use settimeout to set timeout, and it is honored
time.side_effect = [0, 10] # elapsed > 7
proxy = ProxyCommand("ohnoz")
proxy.settimeout(7)
assert proxy.timeout == 7
with raises(socket.timeout):
proxy.recv(3)
@patch("paramiko.proxy.subprocess", new=None)
@patch("paramiko.proxy.subprocess_import_error", new=ImportError("meh"))
def test_raises_subprocess_ImportErrors_at_runtime(self):
# Not an ideal test, but I don't know of a non-bad way to fake out
# module-time ImportErrors. So we mock the symptoms. Meh!
with raises(ImportError) as info:
ProxyCommand("hi!!!")
assert str(info.value) == "meh"
| 5,950 | Python | .py | 136 | 35.764706 | 79 | 0.645345 | paramiko/paramiko | 9,017 | 1,999 | 1,074 | LGPL-2.1 | 9/5/2024, 5:07:36 PM (Europe/Amsterdam) |
573 | pkey.py | paramiko_paramiko/tests/pkey.py | from pathlib import Path
from unittest.mock import patch, call
from pytest import raises
from cryptography.hazmat.primitives.asymmetric.ed448 import Ed448PrivateKey
from paramiko import (
DSSKey,
ECDSAKey,
Ed25519Key,
Message,
PKey,
PublicBlob,
RSAKey,
UnknownKeyType,
)
from ._util import _support
class PKey_:
# NOTE: this is incidentally tested by a number of other tests, such as the
# agent.py test suite
class from_type_string:
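        # from_type_string builds the appropriate PKey subclass from a key
        # type name plus public-key bytes (the same data an SSH agent hands
        # back), so it needs no on-disk file.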
def loads_from_type_and_bytes(self, keys):
obj = PKey.from_type_string(keys.full_type, keys.pkey.asbytes())
assert obj == keys.pkey
# TODO: exceptions
#
# TODO: passphrase? OTOH since this is aimed at the agent...irrelephant
class from_path:
def loads_from_Path(self, keys):
obj = PKey.from_path(keys.path)
assert obj == keys.pkey
def loads_from_str(self):
key = PKey.from_path(str(_support("rsa.key")))
assert isinstance(key, RSAKey)
@patch("paramiko.pkey.Path")
def expands_user(self, mPath):
# real key for guts that want a real key format
mykey = Path(_support("rsa.key"))
pathy = mPath.return_value.expanduser.return_value
# read_bytes for cryptography.io's loaders
pathy.read_bytes.return_value = mykey.read_bytes()
# open() for our own class loader
pathy.open.return_value = mykey.open()
# fake out exists() to avoid attempts to load cert
pathy.exists.return_value = False
PKey.from_path("whatever") # we're not testing expanduser itself
# Both key and cert paths
mPath.return_value.expanduser.assert_has_calls([call(), call()])
def raises_UnknownKeyType_for_unknown_types(self):
            # I.e. a key type which cryptography.io can load into a useful
            # object, but which we do NOT support. Ed448 was chosen somewhat
            # randomly, as OpenSSH doesn't seem to support it either, going
            # by ssh-keygen...
keypath = _support("ed448.key")
with raises(UnknownKeyType) as exc:
PKey.from_path(keypath)
assert issubclass(exc.value.key_type, Ed448PrivateKey)
with open(keypath, "rb") as fd:
assert exc.value.key_bytes == fd.read()
def leaves_cryptography_exceptions_untouched(self):
# a Python file is not a private key!
with raises(ValueError):
PKey.from_path(__file__)
# TODO: passphrase support tested
class automatically_loads_certificates:
def existing_cert_loaded_when_given_key_path(self):
key = PKey.from_path(_support("rsa.key"))
# Public blob exists despite no .load_certificate call
assert key.public_blob is not None
assert (
key.public_blob.key_type == "[email protected]"
)
# And it's definitely the one we expected
assert key.public_blob == PublicBlob.from_file(
_support("rsa.key-cert.pub")
)
def can_be_given_cert_path_instead(self):
key = PKey.from_path(_support("rsa.key-cert.pub"))
# It's still a key, not a PublicBlob
assert isinstance(key, RSAKey)
# Public blob exists despite no .load_certificate call
assert key.public_blob is not None
assert (
key.public_blob.key_type == "[email protected]"
)
# And it's definitely the one we expected
assert key.public_blob == PublicBlob.from_file(
_support("rsa.key-cert.pub")
)
def no_cert_load_if_no_cert(self):
# This key exists (it's a copy of the regular one) but has no
# matching -cert.pub
key = PKey.from_path(_support("rsa-lonely.key"))
assert key.public_blob is None
def excepts_usefully_if_no_key_only_cert(self):
# TODO: is that truly an error condition? the cert is ~the
# pubkey and we still require the privkey for signing, yea?
# This cert exists (it's a copy of the regular one) but there's
# no rsa-missing.key to load.
with raises(FileNotFoundError) as info:
PKey.from_path(_support("rsa-missing.key-cert.pub"))
assert info.value.filename.endswith("rsa-missing.key")
class load_certificate:
def rsa_public_cert_blobs(self):
# Data to test signing with (arbitrary)
data = b"ice weasels"
# Load key w/o cert at first (so avoiding .from_path)
key = RSAKey.from_private_key_file(_support("rsa.key"))
assert key.public_blob is None
# Sign regular-style (using, arbitrarily, SHA2)
msg = key.sign_ssh_data(data, "rsa-sha2-256")
msg.rewind()
assert "rsa-sha2-256" == msg.get_text()
signed = msg.get_binary() # for comparison later
# Load cert and inspect its internals
key.load_certificate(_support("rsa.key-cert.pub"))
assert key.public_blob is not None
assert key.public_blob.key_type == "[email protected]"
assert key.public_blob.comment == "test_rsa.key.pub"
msg = Message(key.public_blob.key_blob)
# cert type
assert msg.get_text() == "[email protected]"
# nonce
msg.get_string()
# public numbers
assert msg.get_mpint() == key.public_numbers.e
assert msg.get_mpint() == key.public_numbers.n
# serial number
assert msg.get_int64() == 1234
# TODO: whoever wrote the OG tests didn't care about the remaining
# fields from
# https://github.com/openssh/openssh-portable/blob/master/PROTOCOL.certkeys
# so neither do I, for now...
# Sign cert-style (still SHA256 - so this actually does almost
# exactly the same thing under the hood as the previous sign)
msg = key.sign_ssh_data(data, "[email protected]")
msg.rewind()
assert "rsa-sha2-256" == msg.get_text()
assert signed == msg.get_binary() # same signature as above
msg.rewind()
assert key.verify_ssh_sig(b"ice weasels", msg) # our data verified
def loading_cert_of_different_type_from_key_raises_ValueError(self):
edkey = Ed25519Key.from_private_key_file(_support("ed25519.key"))
err = "PublicBlob type [email protected] incompatible with key type ssh-ed25519" # noqa
with raises(ValueError, match=err):
edkey.load_certificate(_support("rsa.key-cert.pub"))
def fingerprint(self, keys):
# NOTE: Hardcoded fingerprint expectation stored in fixture.
assert keys.pkey.fingerprint == keys.expected_fp
def algorithm_name(self, keys):
key = keys.pkey
if isinstance(key, RSAKey):
assert key.algorithm_name == "RSA"
elif isinstance(key, DSSKey):
assert key.algorithm_name == "DSS"
elif isinstance(key, ECDSAKey):
assert key.algorithm_name == "ECDSA"
elif isinstance(key, Ed25519Key):
assert key.algorithm_name == "ED25519"
# TODO: corner case: AgentKey, whose .name can be cert-y (due to the
# value of the name field passed via agent protocol) and thus
# algorithm_name is eg "RSA-CERT" - keys loaded directly from disk will
# never look this way, even if they have a .public_blob attached.
class equality_and_hashing:
def same_key_is_equal_to_itself(self, keys):
assert keys.pkey == keys.pkey2
def same_key_same_hash(self, keys):
# NOTE: this isn't a great test due to hashseed randomization under
# Python 3 preventing use of static values, but it does still prove
# that __hash__ is implemented/doesn't explode & works across
# instances
assert hash(keys.pkey) == hash(keys.pkey2)
def keys_are_not_equal_to_other_types(self, keys):
for value in [None, True, ""]:
assert keys.pkey != value
class identifiers_classmethods:
def default_is_class_name_attribute(self):
# NOTE: not all classes _have_ this, only the ones that don't
# customize identifiers().
class MyKey(PKey):
name = "it me"
assert MyKey.identifiers() == ["it me"]
def rsa_is_all_combos_of_cert_and_sha_type(self):
assert RSAKey.identifiers() == [
"ssh-rsa",
"[email protected]",
"rsa-sha2-256",
"[email protected]",
"rsa-sha2-512",
"[email protected]",
]
def dss_is_protocol_name(self):
assert DSSKey.identifiers() == ["ssh-dss"]
def ed25519_is_protocol_name(self):
assert Ed25519Key.identifiers() == ["ssh-ed25519"]
def ecdsa_is_all_curve_names(self):
assert ECDSAKey.identifiers() == [
"ecdsa-sha2-nistp256",
"ecdsa-sha2-nistp384",
"ecdsa-sha2-nistp521",
]
| 9,714 | Python | .py | 198 | 36.671717 | 111 | 0.587032 | paramiko/paramiko | 9,017 | 1,999 | 1,074 | LGPL-2.1 | 9/5/2024, 5:07:36 PM (Europe/Amsterdam) |
574 | _stub_sftp.py | paramiko_paramiko/tests/_stub_sftp.py | # Copyright (C) 2003-2009 Robey Pointer <[email protected]>
#
# This file is part of paramiko.
#
# Paramiko is free software; you can redistribute it and/or modify it under the
# terms of the GNU Lesser General Public License as published by the Free
# Software Foundation; either version 2.1 of the License, or (at your option)
# any later version.
#
# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Paramiko; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
"""
A stub SFTP server for loopback SFTP testing.
"""
import os
from paramiko import (
AUTH_SUCCESSFUL,
OPEN_SUCCEEDED,
SFTPAttributes,
SFTPHandle,
SFTPServer,
SFTPServerInterface,
SFTP_FAILURE,
SFTP_OK,
ServerInterface,
)
from paramiko.common import o666
class StubServer(ServerInterface):
def check_auth_password(self, username, password):
# all are allowed
return AUTH_SUCCESSFUL
def check_channel_request(self, kind, chanid):
return OPEN_SUCCEEDED
class StubSFTPHandle(SFTPHandle):
def stat(self):
try:
return SFTPAttributes.from_stat(os.fstat(self.readfile.fileno()))
except OSError as e:
return SFTPServer.convert_errno(e.errno)
def chattr(self, attr):
        # os.fchown/os.fchmod aren't available on every platform, so we fall
        # back to using the stored filename
try:
SFTPServer.set_file_attr(self.filename, attr)
return SFTP_OK
except OSError as e:
return SFTPServer.convert_errno(e.errno)
class StubSFTPServer(SFTPServerInterface):
# assume current folder is a fine root
# (the tests always create and eventually delete a subfolder, so there
# shouldn't be any mess)
ROOT = os.getcwd()
def _realpath(self, path):
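        # Anchor the client-supplied path under ROOT so this stub server
        # never touches files outside its sandbox folder.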
return self.ROOT + self.canonicalize(path)
def list_folder(self, path):
path = self._realpath(path)
try:
out = []
flist = os.listdir(path)
for fname in flist:
attr = SFTPAttributes.from_stat(
os.stat(os.path.join(path, fname))
)
attr.filename = fname
out.append(attr)
return out
except OSError as e:
return SFTPServer.convert_errno(e.errno)
def stat(self, path):
path = self._realpath(path)
try:
return SFTPAttributes.from_stat(os.stat(path))
except OSError as e:
return SFTPServer.convert_errno(e.errno)
def lstat(self, path):
path = self._realpath(path)
try:
return SFTPAttributes.from_stat(os.lstat(path))
except OSError as e:
return SFTPServer.convert_errno(e.errno)
def open(self, path, flags, attr):
path = self._realpath(path)
try:
binary_flag = getattr(os, "O_BINARY", 0)
flags |= binary_flag
mode = getattr(attr, "st_mode", None)
if mode is not None:
fd = os.open(path, flags, mode)
else:
# os.open() defaults to 0777 which is
# an odd default mode for files
fd = os.open(path, flags, o666)
except OSError as e:
return SFTPServer.convert_errno(e.errno)
if (flags & os.O_CREAT) and (attr is not None):
attr._flags &= ~attr.FLAG_PERMISSIONS
SFTPServer.set_file_attr(path, attr)
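        # Translate the POSIX open flags into an equivalent mode string for
        # os.fdopen(); the fd was already opened with the real flags, so the
        # mode string only needs to express read/write/append intent.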
if flags & os.O_WRONLY:
if flags & os.O_APPEND:
fstr = "ab"
else:
fstr = "wb"
elif flags & os.O_RDWR:
if flags & os.O_APPEND:
fstr = "a+b"
else:
fstr = "r+b"
else:
# O_RDONLY (== 0)
fstr = "rb"
try:
f = os.fdopen(fd, fstr)
except OSError as e:
return SFTPServer.convert_errno(e.errno)
fobj = StubSFTPHandle(flags)
fobj.filename = path
fobj.readfile = f
fobj.writefile = f
return fobj
def remove(self, path):
path = self._realpath(path)
try:
os.remove(path)
except OSError as e:
return SFTPServer.convert_errno(e.errno)
return SFTP_OK
def rename(self, oldpath, newpath):
oldpath = self._realpath(oldpath)
newpath = self._realpath(newpath)
if os.path.exists(newpath):
return SFTP_FAILURE
try:
os.rename(oldpath, newpath)
except OSError as e:
return SFTPServer.convert_errno(e.errno)
return SFTP_OK
def posix_rename(self, oldpath, newpath):
oldpath = self._realpath(oldpath)
newpath = self._realpath(newpath)
try:
os.rename(oldpath, newpath)
except OSError as e:
return SFTPServer.convert_errno(e.errno)
return SFTP_OK
def mkdir(self, path, attr):
path = self._realpath(path)
try:
os.mkdir(path)
if attr is not None:
SFTPServer.set_file_attr(path, attr)
except OSError as e:
return SFTPServer.convert_errno(e.errno)
return SFTP_OK
def rmdir(self, path):
path = self._realpath(path)
try:
os.rmdir(path)
except OSError as e:
return SFTPServer.convert_errno(e.errno)
return SFTP_OK
def chattr(self, path, attr):
path = self._realpath(path)
try:
SFTPServer.set_file_attr(path, attr)
except OSError as e:
return SFTPServer.convert_errno(e.errno)
return SFTP_OK
def symlink(self, target_path, path):
path = self._realpath(path)
if (len(target_path) > 0) and (target_path[0] == "/"):
# absolute symlink
target_path = os.path.join(self.ROOT, target_path[1:])
if target_path[:2] == "//":
# bug in os.path.join
target_path = target_path[1:]
else:
# compute relative to path
abspath = os.path.join(os.path.dirname(path), target_path)
if abspath[: len(self.ROOT)] != self.ROOT:
# this symlink isn't going to work anyway -- just break it
# immediately
target_path = "<error>"
try:
os.symlink(target_path, path)
except OSError as e:
return SFTPServer.convert_errno(e.errno)
return SFTP_OK
def readlink(self, path):
path = self._realpath(path)
try:
symlink = os.readlink(path)
except OSError as e:
return SFTPServer.convert_errno(e.errno)
# if it's absolute, remove the root
if os.path.isabs(symlink):
if symlink[: len(self.ROOT)] == self.ROOT:
symlink = symlink[len(self.ROOT) :]
if (len(symlink) == 0) or (symlink[0] != "/"):
symlink = "/" + symlink
else:
symlink = "<error>"
return symlink
| 7,453 | Python | .py | 208 | 26.331731 | 79 | 0.586761 | paramiko/paramiko | 9,017 | 1,999 | 1,074 | LGPL-2.1 | 9/5/2024, 5:07:36 PM (Europe/Amsterdam) |
575 | test_buffered_pipe.py | paramiko_paramiko/tests/test_buffered_pipe.py | # Copyright (C) 2006-2007 Robey Pointer <[email protected]>
#
# This file is part of paramiko.
#
# Paramiko is free software; you can redistribute it and/or modify it under the
# terms of the GNU Lesser General Public License as published by the Free
# Software Foundation; either version 2.1 of the License, or (at your option)
# any later version.
#
# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Paramiko; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
"""
Some unit tests for BufferedPipe.
"""
import threading
import time
import unittest
from paramiko.buffered_pipe import BufferedPipe, PipeTimeout
from paramiko import pipe
def delay_thread(p):
p.feed("a")
time.sleep(0.5)
p.feed("b")
p.close()
def close_thread(p):
time.sleep(0.2)
p.close()
class BufferedPipeTest(unittest.TestCase):
def test_buffered_pipe(self):
p = BufferedPipe()
self.assertTrue(not p.read_ready())
p.feed("hello.")
self.assertTrue(p.read_ready())
data = p.read(6)
self.assertEqual(b"hello.", data)
p.feed("plus/minus")
self.assertEqual(b"plu", p.read(3))
self.assertEqual(b"s/m", p.read(3))
self.assertEqual(b"inus", p.read(4))
p.close()
self.assertTrue(not p.read_ready())
self.assertEqual(b"", p.read(1))
def test_delay(self):
p = BufferedPipe()
self.assertTrue(not p.read_ready())
threading.Thread(target=delay_thread, args=(p,)).start()
self.assertEqual(b"a", p.read(1, 0.1))
        with self.assertRaises(PipeTimeout):
            p.read(1, 0.1)
self.assertEqual(b"b", p.read(1, 1.0))
self.assertEqual(b"", p.read(1))
def test_close_while_reading(self):
p = BufferedPipe()
threading.Thread(target=close_thread, args=(p,)).start()
data = p.read(1, 1.0)
self.assertEqual(b"", data)
def test_or_pipe(self):
p = pipe.make_pipe()
p1, p2 = pipe.make_or_pipe(p)
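        # make_or_pipe returns two handles wrapping one underlying pipe whose
        # states are OR'd together: the pipe reads as set while either handle
        # is set, and only clears once both have been cleared (exactly what
        # the asserts below exercise).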
self.assertFalse(p._set)
p1.set()
self.assertTrue(p._set)
p2.set()
self.assertTrue(p._set)
p1.clear()
self.assertTrue(p._set)
p2.clear()
self.assertFalse(p._set)
| 2,638 | Python | .py | 77 | 28.350649 | 79 | 0.651355 | paramiko/paramiko | 9,017 | 1,999 | 1,074 | LGPL-2.1 | 9/5/2024, 5:07:36 PM (Europe/Amsterdam) |
576 | test_util.py | paramiko_paramiko/tests/test_util.py | # Copyright (C) 2003-2009 Robey Pointer <[email protected]>
#
# This file is part of paramiko.
#
# Paramiko is free software; you can redistribute it and/or modify it under the
# terms of the GNU Lesser General Public License as published by the Free
# Software Foundation; either version 2.1 of the License, or (at your option)
# any later version.
#
# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Paramiko; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
"""
Some unit tests for utility functions.
"""
from binascii import hexlify
import os
from hashlib import sha1
import unittest
import paramiko
import paramiko.util
from paramiko.util import safe_string
test_hosts_file = """\
secure.example.com ssh-rsa AAAAB3NzaC1yc2EAAAABIwAAAIEA1PD6U2/TVxET6lkpKhOk5r\
9q/kAYG6sP9f5zuUYP8i7FOFp/6ncCEbbtg/lB+A3iidyxoSWl+9jtoyyDOOVX4UIDV9G11Ml8om3\
D+jrpI9cycZHqilK0HmxDeCuxbwyMuaCygU9gS2qoRvNLWZk70OpIKSSpBo0Wl3/XUmz9uhc=
happy.example.com ssh-rsa AAAAB3NzaC1yc2EAAAABIwAAAIEA8bP1ZA7DCZDB9J0s50l31M\
BGQ3GQ/Fc7SX6gkpXkwcZryoi4kNFhHu5LvHcZPdxXV1D+uTMfGS1eyd2Yz/DoNWXNAl8TI0cAsW\
5ymME3bQ4J/k1IKxCtz/bAlAqFgKoc+EolMziDYqWIATtW0rYTJvzGAzTmMj80/QpsFH+Pc2M=
"""
class UtilTest(unittest.TestCase):
def test_imports(self):
"""
verify that all the classes can be imported from paramiko.
"""
for name in (
"Agent",
"AgentKey",
"AuthenticationException",
"AuthFailure",
"AuthHandler",
"AuthResult",
"AuthSource",
"AuthStrategy",
"AutoAddPolicy",
"BadAuthenticationType",
"BufferedFile",
"Channel",
"ChannelException",
"ConfigParseError",
"CouldNotCanonicalize",
"DSSKey",
"HostKeys",
"InMemoryPrivateKey",
"Message",
"MissingHostKeyPolicy",
"NoneAuth",
"OnDiskPrivateKey",
"Password",
"PasswordRequiredException",
"PrivateKey",
"RSAKey",
"RejectPolicy",
"SFTP",
"SFTPAttributes",
"SFTPClient",
"SFTPError",
"SFTPFile",
"SFTPHandle",
"SFTPServer",
"SFTPServerInterface",
"SSHClient",
"SSHConfig",
"SSHConfigDict",
"SSHException",
"SecurityOptions",
"ServerInterface",
"SourceResult",
"SubsystemHandler",
"Transport",
"WarningPolicy",
"util",
):
assert name in dir(paramiko)
def test_generate_key_bytes(self):
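        # generate_key_bytes derives key material by iterated hashing of the
        # passphrase and salt, chaining each digest into the next round until
        # enough bytes accumulate (a simple key-stretching scheme).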
key_bytes = paramiko.util.generate_key_bytes(
sha1, b"ABCDEFGH", "This is my secret passphrase.", 64
)
hexy = "".join([f"{byte:02x}" for byte in key_bytes])
hexpected = "9110e2f6793b69363e58173e9436b13a5a4b339005741d5c680e505f57d871347b4239f14fb5c46e857d5e100424873ba849ac699cea98d729e57b3e84378e8b" # noqa
assert hexy == hexpected
def test_host_keys(self):
with open("hostfile.temp", "w") as f:
f.write(test_hosts_file)
try:
hostdict = paramiko.util.load_host_keys("hostfile.temp")
assert 2 == len(hostdict)
assert 1 == len(list(hostdict.values())[0])
assert 1 == len(list(hostdict.values())[1])
fp = hexlify(
hostdict["secure.example.com"]["ssh-rsa"].get_fingerprint()
).upper()
assert b"E6684DB30E109B67B70FF1DC5C7F1363" == fp
finally:
os.unlink("hostfile.temp")
def test_clamp_value(self):
assert 32768 == paramiko.util.clamp_value(32767, 32768, 32769)
assert 32767 == paramiko.util.clamp_value(32767, 32765, 32769)
assert 32769 == paramiko.util.clamp_value(32767, 32770, 32769)
def test_safe_string(self):
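        # safe_string escapes bytes outside printable ASCII as %XX hex so the
        # result is safe to log; printable input passes through unchanged.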
vanilla = b"vanilla"
has_bytes = b"has \7\3 bytes"
safe_vanilla = safe_string(vanilla)
safe_has_bytes = safe_string(has_bytes)
expected_bytes = b"has %07%03 bytes"
err = "{!r} != {!r}"
msg = err.format(safe_vanilla, vanilla)
assert safe_vanilla == vanilla, msg
msg = err.format(safe_has_bytes, expected_bytes)
assert safe_has_bytes == expected_bytes, msg
| 4,758 | Python | .py | 125 | 29.552 | 158 | 0.638035 | paramiko/paramiko | 9,017 | 1,999 | 1,074 | LGPL-2.1 | 9/5/2024, 5:07:36 PM (Europe/Amsterdam) |
577 | test_sftp_big.py | paramiko_paramiko/tests/test_sftp_big.py | # Copyright (C) 2003-2009 Robey Pointer <[email protected]>
#
# This file is part of paramiko.
#
# Paramiko is free software; you can redistribute it and/or modify it under the
# terms of the GNU Lesser General Public License as published by the Free
# Software Foundation; either version 2.1 of the License, or (at your option)
# any later version.
#
# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Paramiko; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
"""
some unit tests to make sure sftp works well with large files.
a real sftp server is contacted, and a new folder is created there in which
to do test file operations (so no existing files will be harmed).
"""
import random
import struct
import sys
import time
from paramiko.common import o660
from ._util import slow, wait_until
@slow
class TestBigSFTP:
def test_lots_of_files(self, sftp):
"""
create a bunch of files over the same session.
"""
numfiles = 100
try:
for i in range(numfiles):
target = f"{sftp.FOLDER}/file{i}.txt"
with sftp.open(target, "w", 1) as f:
f.write(f"this is file #{i}.\n")
sftp.chmod(target, o660)
            # now make sure every file is there, by creating a list of
            # filenames and reading them in random order.
numlist = list(range(numfiles))
while len(numlist) > 0:
r = numlist[random.randint(0, len(numlist) - 1)]
with sftp.open(f"{sftp.FOLDER}/file{r}.txt") as f:
assert f.readline() == f"this is file #{r}.\n"
numlist.remove(r)
finally:
for i in range(numfiles):
try:
sftp.remove(f"{sftp.FOLDER}/file{i}.txt")
                except Exception:
pass
def test_big_file(self, sftp):
"""
write a 1MB file with no buffering.
"""
kblob = 1024 * b"x"
start = time.time()
try:
with sftp.open(f"{sftp.FOLDER}/hongry.txt", "w") as f:
for n in range(1024):
f.write(kblob)
if n % 128 == 0:
sys.stderr.write(".")
sys.stderr.write(" ")
assert (
sftp.stat(f"{sftp.FOLDER}/hongry.txt").st_size == 1024 * 1024
)
end = time.time()
sys.stderr.write(f"{round(end - start)}s")
start = time.time()
with sftp.open(f"{sftp.FOLDER}/hongry.txt", "r") as f:
for n in range(1024):
data = f.read(1024)
assert data == kblob
end = time.time()
sys.stderr.write(f"{round(end - start)}s")
finally:
sftp.remove(f"{sftp.FOLDER}/hongry.txt")
def test_big_file_pipelined(self, sftp):
"""
write a 1MB file, with no linefeeds, using pipelining.
"""
kblob = bytes().join([struct.pack(">H", n) for n in range(512)])
start = time.time()
try:
with sftp.open(f"{sftp.FOLDER}/hongry.txt", "wb") as f:
f.set_pipelined(True)
for n in range(1024):
f.write(kblob)
if n % 128 == 0:
sys.stderr.write(".")
sys.stderr.write(" ")
assert (
sftp.stat(f"{sftp.FOLDER}/hongry.txt").st_size == 1024 * 1024
)
end = time.time()
sys.stderr.write(f"{round(end - start)}s")
start = time.time()
with sftp.open(f"{sftp.FOLDER}/hongry.txt", "rb") as f:
file_size = f.stat().st_size
f.prefetch(file_size)
# read on odd boundaries to make sure the bytes aren't getting
# scrambled
n = 0
k2blob = kblob + kblob
chunk = 629
size = 1024 * 1024
while n < size:
if n + chunk > size:
chunk = size - n
data = f.read(chunk)
offset = n % 1024
assert data == k2blob[offset : offset + chunk]
n += chunk
end = time.time()
sys.stderr.write(f"{round(end - start)}s")
finally:
sftp.remove(f"{sftp.FOLDER}/hongry.txt")
def test_prefetch_seek(self, sftp):
kblob = bytes().join([struct.pack(">H", n) for n in range(512)])
try:
with sftp.open(f"{sftp.FOLDER}/hongry.txt", "wb") as f:
f.set_pipelined(True)
for n in range(1024):
f.write(kblob)
if n % 128 == 0:
sys.stderr.write(".")
sys.stderr.write(" ")
assert (
sftp.stat(f"{sftp.FOLDER}/hongry.txt").st_size == 1024 * 1024
)
start = time.time()
k2blob = kblob + kblob
chunk = 793
for i in range(10):
with sftp.open(f"{sftp.FOLDER}/hongry.txt", "rb") as f:
file_size = f.stat().st_size
f.prefetch(file_size)
base_offset = (512 * 1024) + 17 * random.randint(
1000, 2000
)
offsets = [base_offset + j * chunk for j in range(100)]
# randomly seek around and read them out
for j in range(100):
offset = offsets[random.randint(0, len(offsets) - 1)]
offsets.remove(offset)
f.seek(offset)
data = f.read(chunk)
n_offset = offset % 1024
assert data == k2blob[n_offset : n_offset + chunk]
offset += chunk
end = time.time()
sys.stderr.write(f"{round(end - start)}s")
finally:
sftp.remove(f"{sftp.FOLDER}/hongry.txt")
def test_readv_seek(self, sftp):
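        # readv() takes a list of (offset, length) pairs and yields each
        # requested block in order, batching the reads over the wire.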
kblob = bytes().join([struct.pack(">H", n) for n in range(512)])
try:
with sftp.open(f"{sftp.FOLDER}/hongry.txt", "wb") as f:
f.set_pipelined(True)
for n in range(1024):
f.write(kblob)
if n % 128 == 0:
sys.stderr.write(".")
sys.stderr.write(" ")
assert (
sftp.stat(f"{sftp.FOLDER}/hongry.txt").st_size == 1024 * 1024
)
start = time.time()
k2blob = kblob + kblob
chunk = 793
for i in range(10):
with sftp.open(f"{sftp.FOLDER}/hongry.txt", "rb") as f:
base_offset = (512 * 1024) + 17 * random.randint(
1000, 2000
)
# make a bunch of offsets and put them in random order
offsets = [base_offset + j * chunk for j in range(100)]
readv_list = []
for j in range(100):
o = offsets[random.randint(0, len(offsets) - 1)]
offsets.remove(o)
readv_list.append((o, chunk))
ret = f.readv(readv_list)
for i in range(len(readv_list)):
offset = readv_list[i][0]
n_offset = offset % 1024
assert next(ret) == k2blob[n_offset : n_offset + chunk]
end = time.time()
sys.stderr.write(f"{round(end - start)}s")
finally:
sftp.remove(f"{sftp.FOLDER}/hongry.txt")
def test_lots_of_prefetching(self, sftp):
"""
prefetch a 1MB file a bunch of times, discarding the file object
without using it, to verify that paramiko doesn't get confused.
"""
kblob = 1024 * b"x"
try:
with sftp.open(f"{sftp.FOLDER}/hongry.txt", "w") as f:
f.set_pipelined(True)
for n in range(1024):
f.write(kblob)
if n % 128 == 0:
sys.stderr.write(".")
sys.stderr.write(" ")
assert (
sftp.stat(f"{sftp.FOLDER}/hongry.txt").st_size == 1024 * 1024
)
for i in range(10):
with sftp.open(f"{sftp.FOLDER}/hongry.txt", "r") as f:
file_size = f.stat().st_size
f.prefetch(file_size)
with sftp.open(f"{sftp.FOLDER}/hongry.txt", "r") as f:
file_size = f.stat().st_size
f.prefetch(file_size)
for n in range(1024):
data = f.read(1024)
assert data == kblob
if n % 128 == 0:
sys.stderr.write(".")
sys.stderr.write(" ")
finally:
sftp.remove(f"{sftp.FOLDER}/hongry.txt")
def test_prefetch_readv(self, sftp):
"""
verify that prefetch and readv don't conflict with each other.
"""
kblob = bytes().join([struct.pack(">H", n) for n in range(512)])
try:
with sftp.open(f"{sftp.FOLDER}/hongry.txt", "wb") as f:
f.set_pipelined(True)
for n in range(1024):
f.write(kblob)
if n % 128 == 0:
sys.stderr.write(".")
sys.stderr.write(" ")
assert (
sftp.stat(f"{sftp.FOLDER}/hongry.txt").st_size == 1024 * 1024
)
with sftp.open(f"{sftp.FOLDER}/hongry.txt", "rb") as f:
file_size = f.stat().st_size
f.prefetch(file_size)
data = f.read(1024)
assert data == kblob
chunk_size = 793
base_offset = 512 * 1024
k2blob = kblob + kblob
chunks = [
(base_offset + (chunk_size * i), chunk_size)
for i in range(20)
]
for data in f.readv(chunks):
offset = base_offset % 1024
assert chunk_size == len(data)
assert k2blob[offset : offset + chunk_size] == data
base_offset += chunk_size
sys.stderr.write(" ")
finally:
sftp.remove(f"{sftp.FOLDER}/hongry.txt")
def test_large_readv(self, sftp):
"""
verify that a very large readv is broken up correctly and still
returned as a single blob.
"""
kblob = bytes().join([struct.pack(">H", n) for n in range(512)])
try:
with sftp.open(f"{sftp.FOLDER}/hongry.txt", "wb") as f:
f.set_pipelined(True)
for n in range(1024):
f.write(kblob)
if n % 128 == 0:
sys.stderr.write(".")
sys.stderr.write(" ")
assert (
sftp.stat(f"{sftp.FOLDER}/hongry.txt").st_size == 1024 * 1024
)
with sftp.open(f"{sftp.FOLDER}/hongry.txt", "rb") as f:
data = list(f.readv([(23 * 1024, 128 * 1024)]))
assert len(data) == 1
data = data[0]
assert len(data) == 128 * 1024
sys.stderr.write(" ")
finally:
sftp.remove(f"{sftp.FOLDER}/hongry.txt")
def test_big_file_big_buffer(self, sftp):
"""
write a 1MB file, with no linefeeds, and a big buffer.
"""
mblob = 1024 * 1024 * "x"
try:
with sftp.open(f"{sftp.FOLDER}/hongry.txt", "w", 128 * 1024) as f:
f.write(mblob)
assert (
sftp.stat(f"{sftp.FOLDER}/hongry.txt").st_size == 1024 * 1024
)
finally:
sftp.remove(f"{sftp.FOLDER}/hongry.txt")
def test_big_file_renegotiate(self, sftp):
"""
write a 1MB file, forcing key renegotiation in the middle.
"""
t = sftp.sock.get_transport()
t.packetizer.REKEY_BYTES = 512 * 1024
k32blob = 32 * 1024 * "x"
try:
with sftp.open(f"{sftp.FOLDER}/hongry.txt", "w", 128 * 1024) as f:
for i in range(32):
f.write(k32blob)
assert (
sftp.stat(f"{sftp.FOLDER}/hongry.txt").st_size == 1024 * 1024
)
assert t.H != t.session_id
# try to read it too.
with sftp.open(f"{sftp.FOLDER}/hongry.txt", "r", 128 * 1024) as f:
file_size = f.stat().st_size
f.prefetch(file_size)
total = 0
while total < 1024 * 1024:
total += len(f.read(32 * 1024))
finally:
sftp.remove(f"{sftp.FOLDER}/hongry.txt")
t.packetizer.REKEY_BYTES = pow(2, 30)
def test_prefetch_limit(self, sftp):
"""
write a 1MB file and prefetch with a limit
"""
kblob = 1024 * b"x"
start = time.time()
def expect_prefetch_extents(file, expected_extents):
with file._prefetch_lock:
assert len(file._prefetch_extents) == expected_extents
try:
with sftp.open(f"{sftp.FOLDER}/hongry.txt", "w") as f:
for n in range(1024):
f.write(kblob)
if n % 128 == 0:
sys.stderr.write(".")
sys.stderr.write(" ")
assert (
sftp.stat(f"{sftp.FOLDER}/hongry.txt").st_size == 1024 * 1024
)
end = time.time()
sys.stderr.write(f"{round(end - start)}s")
# read with prefetch, no limit
# expecting 32 requests (32k * 32 == 1M)
with sftp.open(f"{sftp.FOLDER}/hongry.txt", "rb") as f:
file_size = f.stat().st_size
f.prefetch(file_size)
wait_until(lambda: expect_prefetch_extents(f, 32))
# read with prefetch, limiting to 5 simultaneous requests
with sftp.open(f"{sftp.FOLDER}/hongry.txt", "rb") as f:
file_size = f.stat().st_size
f.prefetch(file_size, 5)
wait_until(lambda: expect_prefetch_extents(f, 5))
for n in range(1024):
with f._prefetch_lock:
assert len(f._prefetch_extents) <= 5
data = f.read(1024)
assert data == kblob
if n % 128 == 0:
sys.stderr.write(".")
finally:
sftp.remove(f"{sftp.FOLDER}/hongry.txt")
| 15,272 | Python | .py | 368 | 27.336957 | 79 | 0.482566 | paramiko/paramiko | 9,017 | 1,999 | 1,074 | LGPL-2.1 | 9/5/2024, 5:07:36 PM (Europe/Amsterdam) |
578 | test_config.py | paramiko_paramiko/tests/test_config.py | # This file is part of Paramiko and subject to the license in /LICENSE in this
# repository
from os.path import expanduser
from socket import gaierror
try:
from invoke import Result
except ImportError:
Result = None
from unittest.mock import patch
from pytest import raises, mark, fixture
from paramiko import (
SSHConfig,
SSHConfigDict,
CouldNotCanonicalize,
ConfigParseError,
)
from ._util import _config
@fixture
def socket():
"""
    Patch all of socket.* in our config module to prevent e.g. real DNS lookups.
Also forces getaddrinfo (used in our addressfamily lookup stuff) to always
fail by default to mimic usual lack of AddressFamily related crap.
Callers who want to mock DNS lookups can then safely assume gethostbyname()
will be in use.
"""
with patch("paramiko.config.socket") as mocket:
# Reinstate gaierror as an actual exception and not a sub-mock.
# (Presumably this would work with any exception, but why not use the
# real one?)
mocket.gaierror = gaierror
# Patch out getaddrinfo, used to detect family-specific IP lookup -
# only useful for a few specific tests.
mocket.getaddrinfo.side_effect = mocket.gaierror
# Patch out getfqdn to return some real string for when it gets called;
# some code (eg tokenization) gets mad w/ MagicMocks
mocket.getfqdn.return_value = "some.fake.fqdn"
mocket.gethostname.return_value = "local.fake.fqdn"
yield mocket
def load_config(name):
return SSHConfig.from_path(_config(name))
class TestSSHConfig:
def setup_method(self):
self.config = load_config("robey")
def test_init(self):
        # Positional arguments are rejected outright...
with raises(TypeError):
SSHConfig("uh oh!")
        # ...and a no-arg construction starts out empty.
assert not SSHConfig()._config
def test_from_text(self):
config = SSHConfig.from_text("User foo")
assert config.lookup("foo.example.com")["user"] == "foo"
def test_from_file(self):
with open(_config("robey")) as flo:
config = SSHConfig.from_file(flo)
assert config.lookup("whatever")["user"] == "robey"
def test_from_path(self):
# NOTE: DO NOT replace with use of load_config() :D
config = SSHConfig.from_path(_config("robey"))
assert config.lookup("meh.example.com")["port"] == "3333"
def test_parse_config(self):
expected = [
{"host": ["*"], "config": {}},
{
"host": ["*"],
"config": {"identityfile": ["~/.ssh/id_rsa"], "user": "robey"},
},
{
"host": ["*.example.com"],
"config": {"user": "bjork", "port": "3333"},
},
{"host": ["*"], "config": {"crazy": "something dumb"}},
{
"host": ["spoo.example.com"],
"config": {"crazy": "something else"},
},
]
assert self.config._config == expected
@mark.parametrize(
"host,values",
(
(
"irc.danger.com",
{
"crazy": "something dumb",
"hostname": "irc.danger.com",
"user": "robey",
},
),
(
"irc.example.com",
{
"crazy": "something dumb",
"hostname": "irc.example.com",
"user": "robey",
"port": "3333",
},
),
(
"spoo.example.com",
{
"crazy": "something dumb",
"hostname": "spoo.example.com",
"user": "robey",
"port": "3333",
},
),
),
)
def test_host_config(self, host, values):
expected = dict(
values, hostname=host, identityfile=[expanduser("~/.ssh/id_rsa")]
)
assert self.config.lookup(host) == expected
def test_fabric_issue_33(self):
config = SSHConfig.from_text(
"""
Host www13.*
Port 22
Host *.example.com
Port 2222
Host *
Port 3333
"""
)
host = "www13.example.com"
expected = {"hostname": host, "port": "22"}
assert config.lookup(host) == expected
def test_proxycommand_config_equals_parsing(self):
"""
ProxyCommand should not split on equals signs within the value.
"""
config = SSHConfig.from_text(
"""
Host space-delimited
ProxyCommand foo bar=biz baz
Host equals-delimited
ProxyCommand=foo bar=biz baz
"""
)
for host in ("space-delimited", "equals-delimited"):
value = config.lookup(host)["proxycommand"]
assert value == "foo bar=biz baz"
def test_proxycommand_interpolation(self):
"""
ProxyCommand should perform interpolation on the value
"""
config = SSHConfig.from_text(
"""
Host specific
Port 37
ProxyCommand host %h port %p lol
Host portonly
Port 155
Host *
Port 25
ProxyCommand host %h port %p
"""
)
for host, val in (
("foo.com", "host foo.com port 25"),
("specific", "host specific port 37 lol"),
("portonly", "host portonly port 155"),
):
assert config.lookup(host)["proxycommand"] == val
def test_proxycommand_tilde_expansion(self):
"""
Tilde (~) should be expanded inside ProxyCommand
"""
config = SSHConfig.from_text(
"""
Host test
ProxyCommand ssh -F ~/.ssh/test_config bastion nc %h %p
"""
)
expected = "ssh -F {}/.ssh/test_config bastion nc test 22".format(
expanduser("~")
)
got = config.lookup("test")["proxycommand"]
assert got == expected
@patch("paramiko.config.getpass")
def test_proxyjump_token_expansion(self, getpass):
getpass.getuser.return_value = "gandalf"
config = SSHConfig.from_text(
"""
Host justhost
ProxyJump jumpuser@%h
Host userhost
ProxyJump %r@%h:222
Host allcustom
ProxyJump %r@%h:%p
"""
)
assert config.lookup("justhost")["proxyjump"] == "jumpuser@justhost"
assert config.lookup("userhost")["proxyjump"] == "gandalf@userhost:222"
assert (
config.lookup("allcustom")["proxyjump"] == "gandalf@allcustom:22"
)
@patch("paramiko.config.getpass")
def test_controlpath_token_expansion(self, getpass, socket):
getpass.getuser.return_value = "gandalf"
config = SSHConfig.from_text(
"""
Host explicit_user
User root
ControlPath user %u remoteuser %r
Host explicit_host
HostName ohai
ControlPath remoteuser %r host %h orighost %n
Host hashbrowns
ControlPath %C
"""
)
result = config.lookup("explicit_user")["controlpath"]
# Remote user is User val, local user is User val
assert result == "user gandalf remoteuser root"
result = config.lookup("explicit_host")["controlpath"]
# Remote user falls back to local user; host and orighost may differ
assert result == "remoteuser gandalf host ohai orighost explicit_host"
# Supports %C
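        # (%C is the hash token: per OpenSSH's rules, a digest over the local
        # hostname, target host, port, and remote username.)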
result = config.lookup("hashbrowns")["controlpath"]
assert result == "a438e7dbf5308b923aba9db8fe2ca63447ac8688"
def test_negation(self):
config = SSHConfig.from_text(
"""
Host www13.* !*.example.com
Port 22
Host *.example.com !www13.*
Port 2222
Host www13.*
Port 8080
Host *
Port 3333
"""
)
host = "www13.example.com"
expected = {"hostname": host, "port": "8080"}
assert config.lookup(host) == expected
def test_proxycommand(self):
config = SSHConfig.from_text(
"""
Host proxy-with-equal-divisor-and-space
ProxyCommand = foo=bar
Host proxy-with-equal-divisor-and-no-space
ProxyCommand=foo=bar
Host proxy-without-equal-divisor
ProxyCommand foo=bar:%h-%p
"""
)
for host, values in {
"proxy-with-equal-divisor-and-space": {
"hostname": "proxy-with-equal-divisor-and-space",
"proxycommand": "foo=bar",
},
"proxy-with-equal-divisor-and-no-space": {
"hostname": "proxy-with-equal-divisor-and-no-space",
"proxycommand": "foo=bar",
},
"proxy-without-equal-divisor": {
"hostname": "proxy-without-equal-divisor",
"proxycommand": "foo=bar:proxy-without-equal-divisor-22",
},
}.items():
assert config.lookup(host) == values
@patch("paramiko.config.getpass")
def test_identityfile(self, getpass, socket):
getpass.getuser.return_value = "gandalf"
config = SSHConfig.from_text(
"""
IdentityFile id_dsa0
Host *
IdentityFile id_dsa1
Host dsa2
IdentityFile id_dsa2
Host dsa2*
IdentityFile id_dsa22
Host hashbrowns
IdentityFile %C
"""
)
for host, values in {
"foo": {"hostname": "foo", "identityfile": ["id_dsa0", "id_dsa1"]},
"dsa2": {
"hostname": "dsa2",
"identityfile": ["id_dsa0", "id_dsa1", "id_dsa2", "id_dsa22"],
},
"dsa22": {
"hostname": "dsa22",
"identityfile": ["id_dsa0", "id_dsa1", "id_dsa22"],
},
"hashbrowns": {
"hostname": "hashbrowns",
"identityfile": [
"id_dsa0",
"id_dsa1",
"a438e7dbf5308b923aba9db8fe2ca63447ac8688",
],
},
}.items():
assert config.lookup(host) == values
def test_config_addressfamily_and_lazy_fqdn(self):
"""
Ensure the code path honoring non-'all' AddressFamily doesn't asplode
"""
config = SSHConfig.from_text(
"""
AddressFamily inet
IdentityFile something_%l_using_fqdn
"""
)
assert config.lookup(
"meh"
) # will die during lookup() if bug regresses
def test_config_dos_crlf_succeeds(self):
config = SSHConfig.from_text(
"""
Host abcqwerty\r\nHostName 127.0.0.1\r\n
"""
)
assert config.lookup("abcqwerty")["hostname"] == "127.0.0.1"
def test_get_hostnames(self):
expected = {"*", "*.example.com", "spoo.example.com"}
assert self.config.get_hostnames() == expected
def test_quoted_host_names(self):
config = SSHConfig.from_text(
"""
Host "param pam" param "pam"
Port 1111
Host "param2"
Port 2222
Host param3 parara
Port 3333
Host param4 "p a r" "p" "par" para
Port 4444
"""
)
res = {
"param pam": {"hostname": "param pam", "port": "1111"},
"param": {"hostname": "param", "port": "1111"},
"pam": {"hostname": "pam", "port": "1111"},
"param2": {"hostname": "param2", "port": "2222"},
"param3": {"hostname": "param3", "port": "3333"},
"parara": {"hostname": "parara", "port": "3333"},
"param4": {"hostname": "param4", "port": "4444"},
"p a r": {"hostname": "p a r", "port": "4444"},
"p": {"hostname": "p", "port": "4444"},
"par": {"hostname": "par", "port": "4444"},
"para": {"hostname": "para", "port": "4444"},
}
for host, values in res.items():
assert config.lookup(host) == values
def test_quoted_params_in_config(self):
config = SSHConfig.from_text(
"""
Host "param pam" param "pam"
IdentityFile id_rsa
Host "param2"
IdentityFile "test rsa key"
Host param3 parara
IdentityFile id_rsa
IdentityFile "test rsa key"
"""
)
res = {
"param pam": {"hostname": "param pam", "identityfile": ["id_rsa"]},
"param": {"hostname": "param", "identityfile": ["id_rsa"]},
"pam": {"hostname": "pam", "identityfile": ["id_rsa"]},
"param2": {"hostname": "param2", "identityfile": ["test rsa key"]},
"param3": {
"hostname": "param3",
"identityfile": ["id_rsa", "test rsa key"],
},
"parara": {
"hostname": "parara",
"identityfile": ["id_rsa", "test rsa key"],
},
}
for host, values in res.items():
assert config.lookup(host) == values
def test_quoted_host_in_config(self):
conf = SSHConfig()
correct_data = {
"param": ["param"],
'"param"': ["param"],
"param pam": ["param", "pam"],
'"param" "pam"': ["param", "pam"],
'"param" pam': ["param", "pam"],
'param "pam"': ["param", "pam"],
'param "pam" p': ["param", "pam", "p"],
'"param" pam "p"': ["param", "pam", "p"],
'"pa ram"': ["pa ram"],
'"pa ram" pam': ["pa ram", "pam"],
'param "p a m"': ["param", "p a m"],
}
incorrect_data = ['param"', '"param', 'param "pam', 'param "pam" "p a']
for host, values in correct_data.items():
assert conf._get_hosts(host) == values
for host in incorrect_data:
with raises(ConfigParseError):
conf._get_hosts(host)
def test_invalid_line_format_excepts(self):
with raises(ConfigParseError):
load_config("invalid")
def test_proxycommand_none_issue_415(self):
config = SSHConfig.from_text(
"""
Host proxycommand-standard-none
ProxyCommand None
Host proxycommand-with-equals-none
ProxyCommand=None
"""
)
for host, values in {
"proxycommand-standard-none": {
"hostname": "proxycommand-standard-none",
"proxycommand": None,
},
"proxycommand-with-equals-none": {
"hostname": "proxycommand-with-equals-none",
"proxycommand": None,
},
}.items():
assert config.lookup(host) == values
def test_proxycommand_none_masking(self):
# Re: https://github.com/paramiko/paramiko/issues/670
config = SSHConfig.from_text(
"""
Host specific-host
ProxyCommand none
Host other-host
ProxyCommand other-proxy
Host *
ProxyCommand default-proxy
"""
)
# In versions <3.0, 'None' ProxyCommands got deleted, and this itself
# caused bugs. In 3.0, we more cleanly map "none" to None. This test
# has been altered accordingly but left around to ensure no
# regressions.
assert config.lookup("specific-host")["proxycommand"] is None
assert config.lookup("other-host")["proxycommand"] == "other-proxy"
cmd = config.lookup("some-random-host")["proxycommand"]
assert cmd == "default-proxy"
def test_hostname_tokenization(self):
result = load_config("hostname-tokenized").lookup("whatever")
assert result["hostname"] == "prefix.whatever"
class TestSSHConfigDict:
def test_SSHConfigDict_construct_empty(self):
assert not SSHConfigDict()
def test_SSHConfigDict_construct_from_list(self):
assert SSHConfigDict([(1, 2)])[1] == 2
def test_SSHConfigDict_construct_from_dict(self):
assert SSHConfigDict({1: 2})[1] == 2
@mark.parametrize("true_ish", ("yes", "YES", "Yes", True))
def test_SSHConfigDict_as_bool_true_ish(self, true_ish):
assert SSHConfigDict({"key": true_ish}).as_bool("key") is True
@mark.parametrize("false_ish", ("no", "NO", "No", False))
def test_SSHConfigDict_as_bool(self, false_ish):
assert SSHConfigDict({"key": false_ish}).as_bool("key") is False
@mark.parametrize("int_val", ("42", 42))
def test_SSHConfigDict_as_int(self, int_val):
assert SSHConfigDict({"key": int_val}).as_int("key") == 42
@mark.parametrize("non_int", ("not an int", None, object()))
def test_SSHConfigDict_as_int_failures(self, non_int):
conf = SSHConfigDict({"key": non_int})
try:
int(non_int)
except Exception as e:
exception_type = type(e)
with raises(exception_type):
conf.as_int("key")
def test_SSHConfig_host_dicts_are_SSHConfigDict_instances(self):
config = SSHConfig.from_text(
"""
Host *.example.com
Port 2222
Host *
Port 3333
"""
)
assert config.lookup("foo.example.com").as_int("port") == 2222
def test_SSHConfig_wildcard_host_dicts_are_SSHConfigDict_instances(self):
config = SSHConfig.from_text(
"""
Host *.example.com
Port 2222
Host *
Port 3333
"""
)
assert config.lookup("anything-else").as_int("port") == 3333
class TestHostnameCanonicalization:
# NOTE: this class uses on-disk configs, and ones with real (at time of
# writing) DNS names, so that one can easily test OpenSSH's behavior using
# "ssh -F path/to/file.config -G <target>".
def test_off_by_default(self, socket):
result = load_config("basic").lookup("www")
assert result["hostname"] == "www"
assert "user" not in result
assert not socket.gethostbyname.called
def test_explicit_no_same_as_default(self, socket):
result = load_config("no-canon").lookup("www")
assert result["hostname"] == "www"
assert "user" not in result
assert not socket.gethostbyname.called
@mark.parametrize(
"config_name",
("canon", "canon-always", "canon-local", "canon-local-always"),
)
def test_canonicalization_base_cases(self, socket, config_name):
result = load_config(config_name).lookup("www")
assert result["hostname"] == "www.paramiko.org"
assert result["user"] == "rando"
socket.gethostbyname.assert_called_once_with("www.paramiko.org")
def test_uses_getaddrinfo_when_AddressFamily_given(self, socket):
# Undo default 'always fails' mock
socket.getaddrinfo.side_effect = None
socket.getaddrinfo.return_value = [True] # just need 1st value truthy
result = load_config("canon-ipv4").lookup("www")
assert result["hostname"] == "www.paramiko.org"
assert result["user"] == "rando"
assert not socket.gethostbyname.called
gai_args = socket.getaddrinfo.call_args[0]
assert gai_args[0] == "www.paramiko.org"
assert gai_args[2] is socket.AF_INET # Mocked, but, still useful
@mark.skip
def test_empty_CanonicalDomains_canonicalizes_despite_noop(self, socket):
# Confirmed this is how OpenSSH behaves as well. Bit silly, but.
# TODO: this requires modifying SETTINGS_REGEX, which is a mite scary
# (honestly I'd prefer to move to a real parser lib anyhow) and since
# this is a very dumb corner case, it's marked skip for now.
result = load_config("empty-canon").lookup("www")
assert result["hostname"] == "www" # no paramiko.org
assert "user" not in result # did not discover canonicalized block
def test_CanonicalDomains_may_be_set_to_space_separated_list(self, socket):
# Test config has a bogus domain, followed by paramiko.org
socket.gethostbyname.side_effect = [socket.gaierror, True]
result = load_config("multi-canon-domains").lookup("www")
assert result["hostname"] == "www.paramiko.org"
assert result["user"] == "rando"
assert [x[0][0] for x in socket.gethostbyname.call_args_list] == [
"www.not-a-real-tld",
"www.paramiko.org",
]
def test_canonicalization_applies_to_single_dot_by_default(self, socket):
result = load_config("deep-canon").lookup("sub.www")
assert result["hostname"] == "sub.www.paramiko.org"
assert result["user"] == "deep"
def test_canonicalization_not_applied_to_two_dots_by_default(self, socket):
result = load_config("deep-canon").lookup("subber.sub.www")
assert result["hostname"] == "subber.sub.www"
assert "user" not in result
def test_hostname_depth_controllable_with_max_dots_directive(self, socket):
# This config sets MaxDots of 2, so now canonicalization occurs
result = load_config("deep-canon-maxdots").lookup("subber.sub.www")
assert result["hostname"] == "subber.sub.www.paramiko.org"
assert result["user"] == "deeper"
def test_max_dots_may_be_zero(self, socket):
result = load_config("zero-maxdots").lookup("sub.www")
assert result["hostname"] == "sub.www"
assert "user" not in result
def test_fallback_yes_does_not_canonicalize_or_error(self, socket):
socket.gethostbyname.side_effect = socket.gaierror
result = load_config("fallback-yes").lookup("www")
assert result["hostname"] == "www"
assert "user" not in result
def test_fallback_no_causes_errors_for_unresolvable_names(self, socket):
socket.gethostbyname.side_effect = socket.gaierror
with raises(CouldNotCanonicalize) as info:
load_config("fallback-no").lookup("doesnotexist")
assert str(info.value) == "doesnotexist"
def test_identityfile_continues_being_appended_to(self, socket):
result = load_config("canon").lookup("www")
assert result["identityfile"] == ["base.key", "canonicalized.key"]
@mark.skip
class TestCanonicalizationOfCNAMEs:
def test_permitted_cnames_may_be_one_to_one_mapping(self):
# CanonicalizePermittedCNAMEs *.foo.com:*.bar.com
pass
def test_permitted_cnames_may_be_one_to_many_mapping(self):
# CanonicalizePermittedCNAMEs *.foo.com:*.bar.com,*.biz.com
pass
def test_permitted_cnames_may_be_many_to_one_mapping(self):
# CanonicalizePermittedCNAMEs *.foo.com,*.bar.com:*.biz.com
pass
def test_permitted_cnames_may_be_many_to_many_mapping(self):
# CanonicalizePermittedCNAMEs *.foo.com,*.bar.com:*.biz.com,*.baz.com
pass
def test_permitted_cnames_may_be_multiple_mappings(self):
# CanonicalizePermittedCNAMEs *.foo.com,*.bar.com *.biz.com:*.baz.com
pass
def test_permitted_cnames_may_be_multiple_complex_mappings(self):
# Same as prev but with multiple patterns on both ends in both args
pass
class TestMatchAll:
def test_always_matches(self):
result = load_config("match-all").lookup("general")
assert result["user"] == "awesome"
def test_may_not_mix_with_non_canonical_keywords(self):
for config in ("match-all-and-more", "match-all-and-more-before"):
with raises(ConfigParseError):
load_config(config).lookup("whatever")
def test_may_come_after_canonical(self, socket):
result = load_config("match-all-after-canonical").lookup("www")
assert result["user"] == "awesome"
def test_may_not_come_before_canonical(self, socket):
with raises(ConfigParseError):
load_config("match-all-before-canonical")
def test_after_canonical_not_loaded_when_non_canonicalized(self, socket):
result = load_config("match-canonical-no").lookup("a-host")
assert "user" not in result
def _expect(success_on):
"""
Returns a side_effect-friendly Invoke success result for given command(s).
Ensures that any other commands fail; this is useful for testing 'Match
exec' because it means all other such clauses under test act like no-ops.
:param success_on:
Single string or list of strings, noting commands that should appear to
succeed.
"""
if isinstance(success_on, str):
success_on = [success_on]
def inner(command, *args, **kwargs):
# Sanity checking - we always expect that invoke.run is called with
# these.
assert kwargs.get("hide", None) == "stdout"
assert kwargs.get("warn", None) is True
        # Fake exit status
        exited = 0 if command in success_on else 1
        return Result(exited=exited)
return inner
@mark.skipif(Result is None, reason="requires invoke package")
class TestMatchExec:
@patch("paramiko.config.invoke", new=None)
@patch("paramiko.config.invoke_import_error", new=ImportError("meh"))
def test_raises_invoke_ImportErrors_at_runtime(self):
# Not an ideal test, but I don't know of a non-bad way to fake out
# module-time ImportErrors. So we mock the symptoms. Meh!
with raises(ImportError) as info:
load_config("match-exec").lookup("oh-noes")
assert str(info.value) == "meh"
@patch("paramiko.config.invoke.run")
@mark.parametrize(
"cmd,user",
[
("unquoted", "rando"),
("quoted", "benjamin"),
("quoted spaced", "neil"),
],
)
def test_accepts_single_possibly_quoted_argument(self, run, cmd, user):
run.side_effect = _expect(cmd)
result = load_config("match-exec").lookup("whatever")
assert result["user"] == user
@patch("paramiko.config.invoke.run")
def test_does_not_match_nonzero_exit_codes(self, run):
# Nothing will succeed -> no User ever gets loaded
run.return_value = Result(exited=1)
result = load_config("match-exec").lookup("whatever")
assert "user" not in result
@patch("paramiko.config.getpass")
@patch("paramiko.config.invoke.run")
def test_tokenizes_argument(self, run, getpass, socket):
getpass.getuser.return_value = "gandalf"
# Actual exec value is "%C %d %h %L %l %n %p %r %u"
parts = (
"bf5ba06778434a9384ee4217e462f64888bd0cd2",
expanduser("~"),
"configured",
"local",
"some.fake.fqdn",
"target",
"22",
"intermediate",
"gandalf",
)
run.side_effect = _expect(" ".join(parts))
result = load_config("match-exec").lookup("target")
assert result["port"] == "1337"
@patch("paramiko.config.invoke.run")
def test_works_with_canonical(self, run, socket):
# Ensure both stanzas' exec components appear to match
run.side_effect = _expect(["uncanonicalized", "canonicalized"])
result = load_config("match-exec-canonical").lookup("who-cares")
# Prove both config values got loaded up, across the two passes
assert result["user"] == "defenseless"
assert result["port"] == "8007"
@patch("paramiko.config.invoke.run")
def test_may_be_negated(self, run):
run.side_effect = _expect("this succeeds")
result = load_config("match-exec-negation").lookup("so-confusing")
# If negation did not work, the first of the two Match exec directives
# would have set User to 'nope' (and/or the second would have NOT set
# User to 'yup')
assert result["user"] == "yup"
def test_requires_an_argument(self):
with raises(ConfigParseError):
load_config("match-exec-no-arg")
@patch("paramiko.config.invoke.run")
def test_works_with_tokenized_hostname(self, run):
run.side_effect = _expect("ping target")
result = load_config("hostname-exec-tokenized").lookup("target")
assert result["hostname"] == "pingable.target"
class TestMatchHost:
def test_matches_target_name_when_no_hostname(self):
result = load_config("match-host").lookup("target")
assert result["user"] == "rand"
def test_matches_hostname_from_global_setting(self):
# Also works for ones set in regular Host stanzas
result = load_config("match-host-name").lookup("anything")
assert result["user"] == "silly"
def test_matches_hostname_from_earlier_match(self):
# Corner case: one Match matches original host, sets HostName,
# subsequent Match matches the latter.
result = load_config("match-host-from-match").lookup("original-host")
assert result["user"] == "inner"
def test_may_be_globbed(self):
result = load_config("match-host-glob-list").lookup("whatever")
assert result["user"] == "matrim"
def test_may_be_comma_separated_list(self):
for target in ("somehost", "someotherhost"):
result = load_config("match-host-glob-list").lookup(target)
assert result["user"] == "thom"
def test_comma_separated_list_may_have_internal_negation(self):
conf = load_config("match-host-glob-list")
assert conf.lookup("good")["user"] == "perrin"
assert "user" not in conf.lookup("goof")
def test_matches_canonicalized_name(self, socket):
# Without 'canonical' explicitly declared, mind.
result = load_config("match-host-canonicalized").lookup("www")
assert result["user"] == "rand"
def test_works_with_canonical_keyword(self, socket):
# NOTE: distinct from 'happens to be canonicalized' above
result = load_config("match-host-canonicalized").lookup("docs")
assert result["user"] == "eric"
def test_may_be_negated(self):
conf = load_config("match-host-negated")
assert conf.lookup("docs")["user"] == "jeff"
assert "user" not in conf.lookup("www")
def test_requires_an_argument(self):
with raises(ConfigParseError):
load_config("match-host-no-arg")
class TestMatchOriginalHost:
def test_matches_target_host_not_hostname(self):
result = load_config("match-orighost").lookup("target")
assert result["hostname"] == "bogus"
assert result["user"] == "tuon"
def test_matches_target_host_not_canonicalized_name(self, socket):
result = load_config("match-orighost-canonical").lookup("www")
assert result["hostname"] == "www.paramiko.org"
assert result["user"] == "tuon"
def test_may_be_globbed(self):
result = load_config("match-orighost").lookup("whatever")
assert result["user"] == "matrim"
def test_may_be_comma_separated_list(self):
for target in ("comma", "separated"):
result = load_config("match-orighost").lookup(target)
assert result["user"] == "chameleon"
def test_comma_separated_list_may_have_internal_negation(self):
result = load_config("match-orighost").lookup("nope")
assert "user" not in result
def test_may_be_negated(self):
result = load_config("match-orighost").lookup("docs")
assert result["user"] == "thom"
def test_requires_an_argument(self):
with raises(ConfigParseError):
load_config("match-orighost-no-arg")
class TestMatchUser:
def test_matches_configured_username(self):
result = load_config("match-user-explicit").lookup("anything")
assert result["hostname"] == "dumb"
@patch("paramiko.config.getpass.getuser")
def test_matches_local_username_by_default(self, getuser):
getuser.return_value = "gandalf"
result = load_config("match-user").lookup("anything")
assert result["hostname"] == "gondor"
@patch("paramiko.config.getpass.getuser")
def test_may_be_globbed(self, getuser):
for user in ("bilbo", "bombadil"):
getuser.return_value = user
result = load_config("match-user").lookup("anything")
assert result["hostname"] == "shire"
@patch("paramiko.config.getpass.getuser")
def test_may_be_comma_separated_list(self, getuser):
for user in ("aragorn", "frodo"):
getuser.return_value = user
result = load_config("match-user").lookup("anything")
assert result["hostname"] == "moria"
@patch("paramiko.config.getpass.getuser")
def test_comma_separated_list_may_have_internal_negation(self, getuser):
getuser.return_value = "legolas"
result = load_config("match-user").lookup("anything")
assert "port" not in result
getuser.return_value = "gimli"
result = load_config("match-user").lookup("anything")
assert result["port"] == "7373"
@patch("paramiko.config.getpass.getuser")
def test_may_be_negated(self, getuser):
getuser.return_value = "saruman"
result = load_config("match-user").lookup("anything")
assert result["hostname"] == "mordor"
def test_requires_an_argument(self):
with raises(ConfigParseError):
load_config("match-user-no-arg")
# NOTE: highly derivative of previous suite due to the former's use of
# localuser fallback. Doesn't seem worth conflating/refactoring right now.
class TestMatchLocalUser:
@patch("paramiko.config.getpass.getuser")
def test_matches_local_username(self, getuser):
getuser.return_value = "gandalf"
result = load_config("match-localuser").lookup("anything")
assert result["hostname"] == "gondor"
@patch("paramiko.config.getpass.getuser")
def test_may_be_globbed(self, getuser):
for user in ("bilbo", "bombadil"):
getuser.return_value = user
result = load_config("match-localuser").lookup("anything")
assert result["hostname"] == "shire"
@patch("paramiko.config.getpass.getuser")
def test_may_be_comma_separated_list(self, getuser):
for user in ("aragorn", "frodo"):
getuser.return_value = user
result = load_config("match-localuser").lookup("anything")
assert result["hostname"] == "moria"
@patch("paramiko.config.getpass.getuser")
def test_comma_separated_list_may_have_internal_negation(self, getuser):
getuser.return_value = "legolas"
result = load_config("match-localuser").lookup("anything")
assert "port" not in result
getuser.return_value = "gimli"
result = load_config("match-localuser").lookup("anything")
assert result["port"] == "7373"
@patch("paramiko.config.getpass.getuser")
def test_may_be_negated(self, getuser):
getuser.return_value = "saruman"
result = load_config("match-localuser").lookup("anything")
assert result["hostname"] == "mordor"
def test_requires_an_argument(self):
with raises(ConfigParseError):
load_config("match-localuser-no-arg")
class TestComplexMatching:
    # NOTE: this is still a cherry-pick of a few levels of complexity; there's
# no point testing literally all possible combinations.
def test_originalhost_host(self):
result = load_config("match-complex").lookup("target")
assert result["hostname"] == "bogus"
assert result["user"] == "rand"
@patch("paramiko.config.getpass.getuser")
def test_originalhost_localuser(self, getuser):
getuser.return_value = "rando"
result = load_config("match-complex").lookup("remote")
assert result["user"] == "calrissian"
@patch("paramiko.config.getpass.getuser")
def test_everything_but_all(self, getuser):
getuser.return_value = "rando"
result = load_config("match-complex").lookup("www")
assert result["port"] == "7777"
@patch("paramiko.config.getpass.getuser")
def test_everything_but_all_with_some_negated(self, getuser):
getuser.return_value = "rando"
result = load_config("match-complex").lookup("docs")
assert result["port"] == "1234"
def test_negated_canonical(self, socket):
# !canonical in a config that is not canonicalized - does match
result = load_config("match-canonical-no").lookup("specific")
assert result["user"] == "overload"
# !canonical in a config that is canonicalized - does NOT match
result = load_config("match-canonical-yes").lookup("www")
assert result["user"] == "hidden"
class TestFinalMatching:
def test_finally(self):
result = load_config("match-final").lookup("finally")
assert result["proxyjump"] == "jump"
assert result["port"] == "1001"
def test_default_port(self):
result = load_config("match-final").lookup("default-port")
assert result["proxyjump"] == "jump"
assert result["port"] == "1002"
def test_negated(self):
result = load_config("match-final").lookup("jump")
assert result["port"] == "1003"
| 36,596 | Python | .py | 882 | 32.993197 | 79 | 0.611708 | paramiko/paramiko | 9,017 | 1,999 | 1,074 | LGPL-2.1 | 9/5/2024, 5:07:36 PM (Europe/Amsterdam) |
579 | test_kex_gss.py | paramiko_paramiko/tests/test_kex_gss.py | # Copyright (C) 2003-2007 Robey Pointer <[email protected]>
# Copyright (C) 2013-2014 science + computing ag
# Author: Sebastian Deiss <[email protected]>
#
#
# This file is part of paramiko.
#
# Paramiko is free software; you can redistribute it and/or modify it under the
# terms of the GNU Lesser General Public License as published by the Free
# Software Foundation; either version 2.1 of the License, or (at your option)
# any later version.
#
# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Paramiko; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
"""
Unit Tests for the GSS-API / SSPI SSHv2 Diffie-Hellman Key Exchange and user
authentication
"""
import socket
import threading
import unittest
import paramiko
from ._util import needs_gssapi, KerberosTestCase, update_env, _support
class NullServer(paramiko.ServerInterface):
def get_allowed_auths(self, username):
return "gssapi-keyex"
def check_auth_gssapi_keyex(
self, username, gss_authenticated=paramiko.AUTH_FAILED, cc_file=None
):
if gss_authenticated == paramiko.AUTH_SUCCESSFUL:
return paramiko.AUTH_SUCCESSFUL
return paramiko.AUTH_FAILED
def enable_auth_gssapi(self):
UseGSSAPI = True
return UseGSSAPI
def check_channel_request(self, kind, chanid):
return paramiko.OPEN_SUCCEEDED
def check_channel_exec_request(self, channel, command):
if command != b"yes":
return False
return True
@needs_gssapi
class GSSKexTest(KerberosTestCase):
def setUp(self):
self.username = self.realm.user_princ
self.hostname = socket.getfqdn(self.realm.hostname)
self.sockl = socket.socket()
self.sockl.bind((self.realm.hostname, 0))
self.sockl.listen(1)
self.addr, self.port = self.sockl.getsockname()
self.event = threading.Event()
update_env(self, self.realm.env)
thread = threading.Thread(target=self._run)
thread.start()
def tearDown(self):
for attr in "tc ts socks sockl".split():
if hasattr(self, attr):
getattr(self, attr).close()
def _run(self):
self.socks, addr = self.sockl.accept()
self.ts = paramiko.Transport(self.socks, gss_kex=True)
host_key = paramiko.RSAKey.from_private_key_file(_support("rsa.key"))
self.ts.add_server_key(host_key)
self.ts.set_gss_host(self.realm.hostname)
try:
self.ts.load_server_moduli()
        except Exception:
print("(Failed to load moduli -- gex will be unsupported.)")
server = NullServer()
self.ts.start_server(self.event, server)
def _test_gsskex_and_auth(self, gss_host, rekey=False):
"""
Verify that Paramiko can handle SSHv2 GSS-API / SSPI authenticated
Diffie-Hellman Key Exchange and user authentication with the GSS-API
context created during key exchange.
"""
host_key = paramiko.RSAKey.from_private_key_file(_support("rsa.key"))
public_host_key = paramiko.RSAKey(data=host_key.asbytes())
self.tc = paramiko.SSHClient()
self.tc.get_host_keys().add(
f"[{self.hostname}]:{self.port}", "ssh-rsa", public_host_key
)
self.tc.connect(
self.hostname,
self.port,
username=self.username,
gss_auth=True,
gss_kex=True,
gss_host=gss_host,
)
self.event.wait(1.0)
        self.assertTrue(self.event.is_set())
        self.assertTrue(self.ts.is_active())
        self.assertEqual(self.username, self.ts.get_username())
        self.assertEqual(True, self.ts.is_authenticated())
        self.assertEqual(True, self.tc.get_transport().gss_kex_used)
stdin, stdout, stderr = self.tc.exec_command("yes")
schan = self.ts.accept(1.0)
if rekey:
self.tc.get_transport().renegotiate_keys()
schan.send("Hello there.\n")
schan.send_stderr("This is on stderr.\n")
schan.close()
self.assertEquals("Hello there.\n", stdout.readline())
self.assertEquals("", stdout.readline())
self.assertEquals("This is on stderr.\n", stderr.readline())
self.assertEquals("", stderr.readline())
stdin.close()
stdout.close()
stderr.close()
def test_gsskex_and_auth(self):
"""
Verify that Paramiko can handle SSHv2 GSS-API / SSPI authenticated
Diffie-Hellman Key Exchange and user authentication with the GSS-API
context created during key exchange.
"""
self._test_gsskex_and_auth(gss_host=None)
# To be investigated, see https://github.com/paramiko/paramiko/issues/1312
@unittest.expectedFailure
def test_gsskex_and_auth_rekey(self):
"""
Verify that Paramiko can rekey.
"""
self._test_gsskex_and_auth(gss_host=None, rekey=True)
| 5,302 | Python | .py | 130 | 33.538462 | 79 | 0.663947 | paramiko/paramiko | 9,017 | 1,999 | 1,074 | LGPL-2.1 | 9/5/2024, 5:07:36 PM (Europe/Amsterdam) |
580 | _loop.py | paramiko_paramiko/tests/_loop.py | # Copyright (C) 2003-2009 Robey Pointer <[email protected]>
#
# This file is part of paramiko.
#
# Paramiko is free software; you can redistribute it and/or modify it under the
# terms of the GNU Lesser General Public License as published by the Free
# Software Foundation; either version 2.1 of the License, or (at your option)
# any later version.
#
# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Paramiko; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
import socket
import threading
from paramiko.util import asbytes
class LoopSocket:
"""
A LoopSocket looks like a normal socket, but all data written to it is
delivered on the read-end of another LoopSocket, and vice versa. It's
like a software "socketpair".
"""
def __init__(self):
self.__in_buffer = bytes()
self.__lock = threading.Lock()
self.__cv = threading.Condition(self.__lock)
self.__timeout = None
self.__mate = None
self._closed = False
def close(self):
self.__unlink()
self._closed = True
try:
self.__lock.acquire()
self.__in_buffer = bytes()
finally:
self.__lock.release()
def send(self, data):
data = asbytes(data)
if self.__mate is None:
# EOF
raise EOFError()
self.__mate.__feed(data)
return len(data)
def recv(self, n):
self.__lock.acquire()
try:
if self.__mate is None:
# EOF
return bytes()
if len(self.__in_buffer) == 0:
self.__cv.wait(self.__timeout)
if len(self.__in_buffer) == 0:
raise socket.timeout
out = self.__in_buffer[:n]
self.__in_buffer = self.__in_buffer[n:]
return out
finally:
self.__lock.release()
def settimeout(self, n):
self.__timeout = n
def link(self, other):
self.__mate = other
self.__mate.__mate = self
def __feed(self, data):
self.__lock.acquire()
try:
self.__in_buffer += data
self.__cv.notify_all()
finally:
self.__lock.release()
def __unlink(self):
m = None
self.__lock.acquire()
try:
if self.__mate is not None:
m = self.__mate
self.__mate = None
finally:
self.__lock.release()
if m is not None:
m.__unlink()
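# A minimal usage sketch (hypothetical; not part of the test suite). Two
# linked LoopSockets behave like socket.socketpair(): bytes sent into one
# end come out of recv() on the other.
#
#   a, b = LoopSocket(), LoopSocket()
#   a.link(b)
#   a.send(b"ping")
#   assert b.recv(4) == b"ping"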
| 2,875 | Python | .py | 86 | 25.453488 | 79 | 0.584804 | paramiko/paramiko | 9,017 | 1,999 | 1,074 | LGPL-2.1 | 9/5/2024, 5:07:36 PM (Europe/Amsterdam) |
581 | agent.py | paramiko_paramiko/tests/agent.py | from unittest.mock import Mock
from pytest import mark, raises
from paramiko import AgentKey, Message, RSAKey
from paramiko.agent import (
SSH2_AGENT_SIGN_RESPONSE,
SSH_AGENT_RSA_SHA2_256,
SSH_AGENT_RSA_SHA2_512,
cSSH2_AGENTC_SIGN_REQUEST,
)
from ._util import _support
# AgentKey with no inner_key
class _BareAgentKey(AgentKey):
def __init__(self, name, blob):
self.name = name
self.blob = blob
self.inner_key = None
class AgentKey_:
def str_is_repr(self):
# Tests for a missed spot in Python 3 upgrades: AgentKey.__str__ was
# returning bytes, as if under Python 2. When bug present, this
# explodes with "__str__ returned non-string".
key = AgentKey(None, b"secret!!!")
assert str(key) == repr(key)
class init:
def needs_at_least_two_arguments(self):
with raises(TypeError):
AgentKey()
with raises(TypeError):
AgentKey(None)
def sets_attributes_and_parses_blob(self):
agent = Mock()
blob = Message()
blob.add_string("bad-type")
key = AgentKey(agent=agent, blob=bytes(blob))
assert key.agent is agent
assert key.name == "bad-type"
assert key.blob == bytes(blob)
assert key.comment == "" # default
# TODO: logger testing
assert key.inner_key is None # no 'bad-type' algorithm
def comment_optional(self):
blob = Message()
blob.add_string("bad-type")
key = AgentKey(agent=Mock(), blob=bytes(blob), comment="hi!")
assert key.comment == "hi!"
def sets_inner_key_when_known_type(self, keys):
key = AgentKey(agent=Mock(), blob=bytes(keys.pkey))
assert key.inner_key == keys.pkey
class fields:
def defaults_to_get_name_and_blob(self):
key = _BareAgentKey(name="lol", blob=b"lmao")
assert key._fields == ["lol", b"lmao"]
# TODO: pytest-relaxed is buggy (now?), this shows up under get_bits?
def defers_to_inner_key_when_present(self, keys):
key = AgentKey(agent=None, blob=keys.pkey.asbytes())
assert key._fields == keys.pkey._fields
assert key == keys.pkey
class get_bits:
def defaults_to_superclass_implementation(self):
# TODO 4.0: assert raises NotImplementedError like changed parent?
assert _BareAgentKey(None, None).get_bits() == 0
def defers_to_inner_key_when_present(self, keys):
key = AgentKey(agent=None, blob=keys.pkey.asbytes())
assert key.get_bits() == keys.pkey.get_bits()
class asbytes:
def defaults_to_owned_blob(self):
blob = Mock()
assert _BareAgentKey(name=None, blob=blob).asbytes() is blob
def defers_to_inner_key_when_present(self, keys):
key = AgentKey(agent=None, blob=keys.pkey_with_cert.asbytes())
# Artificially make outer key blob != inner key blob; comment in
# AgentKey.asbytes implies this can sometimes really happen but I
# no longer recall when that could be?
key.blob = b"nope"
assert key.asbytes() == key.inner_key.asbytes()
@mark.parametrize(
"sign_kwargs,expected_flag",
[
# No algorithm kwarg: no flags (bitfield -> 0 int)
(dict(), 0),
(dict(algorithm="rsa-sha2-256"), SSH_AGENT_RSA_SHA2_256),
(dict(algorithm="rsa-sha2-512"), SSH_AGENT_RSA_SHA2_512),
# TODO: ideally we only send these when key is a cert,
# but it doesn't actually break when not; meh. Really just wants
# all the parameterization of this test rethought.
(
dict(algorithm="[email protected]"),
SSH_AGENT_RSA_SHA2_256,
),
(
dict(algorithm="[email protected]"),
SSH_AGENT_RSA_SHA2_512,
),
],
)
def signing_data(self, sign_kwargs, expected_flag):
class FakeAgent:
def _send_message(self, msg):
# The thing we actually care most about, we're not testing
# ssh-agent itself here
self._sent_message = msg
sig = Message()
sig.add_string("lol")
sig.rewind()
return SSH2_AGENT_SIGN_RESPONSE, sig
for do_cert in (False, True):
agent = FakeAgent()
# Get key kinda like how a real agent would give it to us - if
# cert, it'd be the entire public blob, not just the pubkey. This
# ensures the code under test sends _just the pubkey part_ back to
# the agent during signature requests (bug was us sending _the
# entire cert blob_, which somehow "worked ok" but always got us
# SHA1)
# NOTE: using lower level loader to avoid auto-cert-load when
# testing regular key (agents expose them separately)
inner_key = RSAKey.from_private_key_file(_support("rsa.key"))
blobby = inner_key.asbytes()
# NOTE: expected key blob always wants to be the real key, even
# when the "key" is a certificate.
expected_request_key_blob = blobby
if do_cert:
inner_key.load_certificate(_support("rsa.key-cert.pub"))
blobby = inner_key.public_blob.key_blob
key = AgentKey(agent, blobby)
result = key.sign_ssh_data(b"data-to-sign", **sign_kwargs)
assert result == b"lol"
msg = agent._sent_message
msg.rewind()
assert msg.get_byte() == cSSH2_AGENTC_SIGN_REQUEST
assert msg.get_string() == expected_request_key_blob
assert msg.get_string() == b"data-to-sign"
assert msg.get_int() == expected_flag
| 6,046 | Python | .py | 132 | 34.537879 | 78 | 0.583376 | paramiko/paramiko | 9,017 | 1,999 | 1,074 | LGPL-2.1 | 9/5/2024, 5:07:36 PM (Europe/Amsterdam) |
582 | test_hostkeys.py | paramiko_paramiko/tests/test_hostkeys.py | # Copyright (C) 2006-2007 Robey Pointer <[email protected]>
#
# This file is part of paramiko.
#
# Paramiko is free software; you can redistribute it and/or modify it under the
# terms of the GNU Lesser General Public License as published by the Free
# Software Foundation; either version 2.1 of the License, or (at your option)
# any later version.
#
# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Paramiko; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
"""
Some unit tests for HostKeys.
"""
from base64 import decodebytes
from binascii import hexlify
import os
import unittest
import paramiko
test_hosts_file = """\
secure.example.com ssh-rsa AAAAB3NzaC1yc2EAAAABIwAAAIEA1PD6U2/TVxET6lkpKhOk5r\
9q/kAYG6sP9f5zuUYP8i7FOFp/6ncCEbbtg/lB+A3iidyxoSWl+9jtoyyDOOVX4UIDV9G11Ml8om3\
D+jrpI9cycZHqilK0HmxDeCuxbwyMuaCygU9gS2qoRvNLWZk70OpIKSSpBo0Wl3/XUmz9uhc=
broken.example.com ssh-rsa AAAA
happy.example.com ssh-rsa AAAAB3NzaC1yc2EAAAABIwAAAIEA8bP1ZA7DCZDB9J0s50l31M\
BGQ3GQ/Fc7SX6gkpXkwcZryoi4kNFhHu5LvHcZPdxXV1D+uTMfGS1eyd2Yz/DoNWXNAl8TI0cAsW\
5ymME3bQ4J/k1IKxCtz/bAlAqFgKoc+EolMziDYqWIATtW0rYTJvzGAzTmMj80/QpsFH+Pc2M=
modern.example.com ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIKHEChAIxsh2hr8Q\
+Ea1AAHZyfEB2elEc2YgduVzBtp+
curvy.example.com ecdsa-sha2-nistp256 AAAAE2VjZHNhLXNoYTItbmlzdHAyNTYAAAAIbmlz\
dHAyNTYAAABBBAa+pY7djSpbg5viAcZhPt56AO3U3Sd7h7dnlUp0EjfDgyYHYQxl2QZ4JGgfwR5iv9\
T9iRZjQzvJd5s+kBAZtpk=
"""
test_hosts_file_tabs = """\
secure.example.com\tssh-rsa\tAAAAB3NzaC1yc2EAAAABIwAAAIEA1PD6U2/TVxET6lkpKhOk5r\
9q/kAYG6sP9f5zuUYP8i7FOFp/6ncCEbbtg/lB+A3iidyxoSWl+9jtoyyDOOVX4UIDV9G11Ml8om3\
D+jrpI9cycZHqilK0HmxDeCuxbwyMuaCygU9gS2qoRvNLWZk70OpIKSSpBo0Wl3/XUmz9uhc=
happy.example.com\tssh-rsa AAAAB3NzaC1yc2EAAAABIwAAAIEA8bP1ZA7DCZDB9J0s50l31M\
BGQ3GQ/Fc7SX6gkpXkwcZryoi4kNFhHu5LvHcZPdxXV1D+uTMfGS1eyd2Yz/DoNWXNAl8TI0cAsW\
5ymME3bQ4J/k1IKxCtz/bAlAqFgKoc+EolMziDYqWIATtW0rYTJvzGAzTmMj80/QpsFH+Pc2M=
modern.example.com\tssh-ed25519\tAAAAC3NzaC1lZDI1NTE5AAAAIKHEChAIxsh2hr8Q\
+Ea1AAHZyfEB2elEc2YgduVzBtp+
curvy.example.com\tecdsa-sha2-nistp256\tAAAAE2VjZHNhLXNoYTItbmlzdHAyNTYAAAAIbml\
zdHAyNTYAAABBBAa+pY7djSpbg5viAcZhPt56AO3U3Sd7h7dnlUp0EjfDgyYHYQxl2QZ4JGgfwR5iv\
9T9iRZjQzvJd5s+kBAZtpk=
"""
keyblob = b"""\
AAAAB3NzaC1yc2EAAAABIwAAAIEA8bP1ZA7DCZDB9J0s50l31MBGQ3GQ/Fc7SX6gkpXkwcZryoi4k\
NFhHu5LvHcZPdxXV1D+uTMfGS1eyd2Yz/DoNWXNAl8TI0cAsW5ymME3bQ4J/k1IKxCtz/bAlAqFgK\
oc+EolMziDYqWIATtW0rYTJvzGAzTmMj80/QpsFH+Pc2M="""
keyblob_dss = b"""\
AAAAB3NzaC1kc3MAAACBAOeBpgNnfRzr/twmAQRu2XwWAp3CFtrVnug6s6fgwj/oLjYbVtjAy6pl/\
h0EKCWx2rf1IetyNsTxWrniA9I6HeDj65X1FyDkg6g8tvCnaNB8Xp/UUhuzHuGsMIipRxBxw9LF60\
8EqZcj1E3ytktoW5B5OcjrkEoz3xG7C+rpIjYvAAAAFQDwz4UnmsGiSNu5iqjn3uTzwUpshwAAAIE\
AkxfFeY8P2wZpDjX0MimZl5wkoFQDL25cPzGBuB4OnB8NoUk/yjAHIIpEShw8V+LzouMK5CTJQo5+\
Ngw3qIch/WgRmMHy4kBq1SsXMjQCte1So6HBMvBPIW5SiMTmjCfZZiw4AYHK+B/JaOwaG9yRg2Ejg\
4Ok10+XFDxlqZo8Y+wAAACARmR7CCPjodxASvRbIyzaVpZoJ/Z6x7dAumV+ysrV1BVYd0lYukmnjO\
1kKBWApqpH1ve9XDQYN8zgxM4b16L21kpoWQnZtXrY3GZ4/it9kUgyB7+NwacIBlXa8cMDL7Q/69o\
0d54U0X/NeX5QxuYR6OMJlrkQB7oiW/P/1mwjQgE="""
class HostKeysTest(unittest.TestCase):
def setUp(self):
with open("hostfile.temp", "w") as f:
f.write(test_hosts_file)
def tearDown(self):
os.unlink("hostfile.temp")
def test_load(self):
hostdict = paramiko.HostKeys("hostfile.temp")
assert len(hostdict) == 4
self.assertEqual(1, len(list(hostdict.values())[0]))
self.assertEqual(1, len(list(hostdict.values())[1]))
fp = hexlify(
hostdict["secure.example.com"]["ssh-rsa"].get_fingerprint()
).upper()
self.assertEqual(b"E6684DB30E109B67B70FF1DC5C7F1363", fp)
def test_add(self):
hostdict = paramiko.HostKeys("hostfile.temp")
hh = "|1|BMsIC6cUIP2zBuXR3t2LRcJYjzM=|hpkJMysjTk/+zzUUzxQEa2ieq6c="
key = paramiko.RSAKey(data=decodebytes(keyblob))
hostdict.add(hh, "ssh-rsa", key)
assert len(hostdict) == 5
x = hostdict["foo.example.com"]
fp = hexlify(x["ssh-rsa"].get_fingerprint()).upper()
self.assertEqual(b"7EC91BB336CB6D810B124B1353C32396", fp)
self.assertTrue(hostdict.check("foo.example.com", key))
def test_dict(self):
hostdict = paramiko.HostKeys("hostfile.temp")
self.assertTrue("secure.example.com" in hostdict)
self.assertTrue("not.example.com" not in hostdict)
self.assertTrue("secure.example.com" in hostdict)
self.assertTrue("not.example.com" not in hostdict)
x = hostdict.get("secure.example.com", None)
self.assertTrue(x is not None)
fp = hexlify(x["ssh-rsa"].get_fingerprint()).upper()
self.assertEqual(b"E6684DB30E109B67B70FF1DC5C7F1363", fp)
assert list(hostdict) == hostdict.keys()
assert len(list(hostdict)) == len(hostdict.keys()) == 4
def test_dict_set(self):
hostdict = paramiko.HostKeys("hostfile.temp")
key = paramiko.RSAKey(data=decodebytes(keyblob))
key_dss = paramiko.DSSKey(data=decodebytes(keyblob_dss))
hostdict["secure.example.com"] = {"ssh-rsa": key, "ssh-dss": key_dss}
hostdict["fake.example.com"] = {}
hostdict["fake.example.com"]["ssh-rsa"] = key
assert len(hostdict) == 5
self.assertEqual(2, len(list(hostdict.values())[0]))
self.assertEqual(1, len(list(hostdict.values())[1]))
self.assertEqual(1, len(list(hostdict.values())[2]))
fp = hexlify(
hostdict["secure.example.com"]["ssh-rsa"].get_fingerprint()
).upper()
self.assertEqual(b"7EC91BB336CB6D810B124B1353C32396", fp)
fp = hexlify(
hostdict["secure.example.com"]["ssh-dss"].get_fingerprint()
).upper()
self.assertEqual(b"4478F0B9A23CC5182009FF755BC1D26C", fp)
def test_delitem(self):
hostdict = paramiko.HostKeys("hostfile.temp")
target = "happy.example.com"
hostdict[target] # will KeyError if not present
del hostdict[target]
try:
hostdict[target]
except KeyError:
pass # Good
else:
assert False, "Entry was not deleted from HostKeys on delitem!"
def test_entry_delitem(self):
hostdict = paramiko.HostKeys("hostfile.temp")
target = "happy.example.com"
entry = hostdict[target]
key_type_list = [key_type for key_type in entry]
for key_type in key_type_list:
del entry[key_type]
# will KeyError if not present
for key_type in key_type_list:
try:
del entry[key_type]
except KeyError:
pass # Good
else:
assert False, "Key was not deleted from Entry on delitem!"
class HostKeysTabsTest(HostKeysTest):
def setUp(self):
with open("hostfile.temp", "w") as f:
f.write(test_hosts_file_tabs)
| 7,247 | Python | .py | 151 | 42.165563 | 80 | 0.746007 | paramiko/paramiko | 9,017 | 1,999 | 1,074 | LGPL-2.1 | 9/5/2024, 5:07:36 PM (Europe/Amsterdam) |
583 | demo.py | paramiko_paramiko/demos/demo.py | #!/usr/bin/env python
# Copyright (C) 2003-2007 Robey Pointer <[email protected]>
#
# This file is part of paramiko.
#
# Paramiko is free software; you can redistribute it and/or modify it under the
# terms of the GNU Lesser General Public License as published by the Free
# Software Foundation; either version 2.1 of the License, or (at your option)
# any later version.
#
# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Paramiko; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
import base64
from binascii import hexlify
import getpass
import os
import select
import socket
import sys
import time
import traceback
from paramiko.py3compat import input
import paramiko
try:
import interactive
except ImportError:
from . import interactive
def agent_auth(transport, username):
"""
Attempt to authenticate to the given transport using any of the private
keys available from an SSH agent.
"""
agent = paramiko.Agent()
agent_keys = agent.get_keys()
if len(agent_keys) == 0:
return
for key in agent_keys:
print("Trying ssh-agent key %s" % hexlify(key.get_fingerprint()))
try:
transport.auth_publickey(username, key)
print("... success!")
return
except paramiko.SSHException:
print("... nope.")
def manual_auth(username, hostname):
default_auth = "p"
auth = input(
"Auth by (p)assword, (r)sa key, or (d)ss key? [%s] " % default_auth
)
if len(auth) == 0:
auth = default_auth
if auth == "r":
default_path = os.path.join(os.environ["HOME"], ".ssh", "id_rsa")
path = input("RSA key [%s]: " % default_path)
if len(path) == 0:
path = default_path
try:
key = paramiko.RSAKey.from_private_key_file(path)
except paramiko.PasswordRequiredException:
password = getpass.getpass("RSA key password: ")
key = paramiko.RSAKey.from_private_key_file(path, password)
t.auth_publickey(username, key)
elif auth == "d":
default_path = os.path.join(os.environ["HOME"], ".ssh", "id_dsa")
path = input("DSS key [%s]: " % default_path)
if len(path) == 0:
path = default_path
try:
key = paramiko.DSSKey.from_private_key_file(path)
except paramiko.PasswordRequiredException:
password = getpass.getpass("DSS key password: ")
key = paramiko.DSSKey.from_private_key_file(path, password)
t.auth_publickey(username, key)
else:
pw = getpass.getpass("Password for %s@%s: " % (username, hostname))
t.auth_password(username, pw)
# setup logging
paramiko.util.log_to_file("demo.log")
username = ""
if len(sys.argv) > 1:
hostname = sys.argv[1]
if hostname.find("@") >= 0:
username, hostname = hostname.split("@")
else:
hostname = input("Hostname: ")
if len(hostname) == 0:
print("*** Hostname required.")
sys.exit(1)
port = 22
if hostname.find(":") >= 0:
hostname, portstr = hostname.split(":")
port = int(portstr)
# now connect
try:
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.connect((hostname, port))
except Exception as e:
print("*** Connect failed: " + str(e))
traceback.print_exc()
sys.exit(1)
try:
t = paramiko.Transport(sock)
try:
t.start_client()
except paramiko.SSHException:
print("*** SSH negotiation failed.")
sys.exit(1)
try:
keys = paramiko.util.load_host_keys(
os.path.expanduser("~/.ssh/known_hosts")
)
except IOError:
try:
keys = paramiko.util.load_host_keys(
os.path.expanduser("~/ssh/known_hosts")
)
except IOError:
print("*** Unable to open host keys file")
keys = {}
# check server's host key -- this is important.
key = t.get_remote_server_key()
if hostname not in keys:
print("*** WARNING: Unknown host key!")
elif key.get_name() not in keys[hostname]:
print("*** WARNING: Unknown host key!")
elif keys[hostname][key.get_name()] != key:
print("*** WARNING: Host key has changed!!!")
sys.exit(1)
else:
print("*** Host key OK.")
# get username
if username == "":
default_username = getpass.getuser()
username = input("Username [%s]: " % default_username)
if len(username) == 0:
username = default_username
agent_auth(t, username)
if not t.is_authenticated():
manual_auth(username, hostname)
if not t.is_authenticated():
print("*** Authentication failed. :(")
t.close()
sys.exit(1)
chan = t.open_session()
chan.get_pty()
chan.invoke_shell()
print("*** Here we go!\n")
interactive.interactive_shell(chan)
chan.close()
t.close()
except Exception as e:
print("*** Caught exception: " + str(e.__class__) + ": " + str(e))
traceback.print_exc()
try:
t.close()
except:
pass
sys.exit(1)
| 5,453 | Python | .py | 164 | 27.256098 | 79 | 0.630839 | paramiko/paramiko | 9,017 | 1,999 | 1,074 | LGPL-2.1 | 9/5/2024, 5:07:36 PM (Europe/Amsterdam) |
584 | interactive.py | paramiko_paramiko/demos/interactive.py | # Copyright (C) 2003-2007 Robey Pointer <[email protected]>
#
# This file is part of paramiko.
#
# Paramiko is free software; you can redistribute it and/or modify it under the
# terms of the GNU Lesser General Public License as published by the Free
# Software Foundation; either version 2.1 of the License, or (at your option)
# any later version.
#
# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Paramiko; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
import socket
import sys
from paramiko.py3compat import u
# windows does not have termios...
try:
import termios
import tty
has_termios = True
except ImportError:
has_termios = False
def interactive_shell(chan):
if has_termios:
posix_shell(chan)
else:
windows_shell(chan)
def posix_shell(chan):
import select
oldtty = termios.tcgetattr(sys.stdin)
try:
tty.setraw(sys.stdin.fileno())
tty.setcbreak(sys.stdin.fileno())
chan.settimeout(0.0)
while True:
r, w, e = select.select([chan, sys.stdin], [], [])
if chan in r:
try:
x = u(chan.recv(1024))
if len(x) == 0:
sys.stdout.write("\r\n*** EOF\r\n")
break
sys.stdout.write(x)
sys.stdout.flush()
except socket.timeout:
pass
if sys.stdin in r:
x = sys.stdin.read(1)
if len(x) == 0:
break
chan.send(x)
finally:
termios.tcsetattr(sys.stdin, termios.TCSADRAIN, oldtty)
# thanks to Mike Looijmans for this code
def windows_shell(chan):
import threading
sys.stdout.write(
"Line-buffered terminal emulation. Press F6 or ^Z to send EOF.\r\n\r\n"
)
def writeall(sock):
while True:
data = sock.recv(256)
if not data:
sys.stdout.write("\r\n*** EOF ***\r\n\r\n")
sys.stdout.flush()
break
            sys.stdout.write(u(data))  # decode bytes for Python 3 stdout
sys.stdout.flush()
writer = threading.Thread(target=writeall, args=(chan,))
writer.start()
try:
while True:
d = sys.stdin.read(1)
if not d:
break
chan.send(d)
except EOFError:
# user hit ^Z or F6
pass
| 2,780 | Python | .py | 84 | 24.75 | 79 | 0.597238 | paramiko/paramiko | 9,017 | 1,999 | 1,074 | LGPL-2.1 | 9/5/2024, 5:07:36 PM (Europe/Amsterdam) |
585 | rforward.py | paramiko_paramiko/demos/rforward.py | #!/usr/bin/env python
# Copyright (C) 2008 Robey Pointer <[email protected]>
#
# This file is part of paramiko.
#
# Paramiko is free software; you can redistribute it and/or modify it under the
# terms of the GNU Lesser General Public License as published by the Free
# Software Foundation; either version 2.1 of the License, or (at your option)
# any later version.
#
# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Paramiko; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
"""
Sample script showing how to do remote port forwarding over paramiko.
This script connects to the requested SSH server and sets up remote port
forwarding (the openssh -R option) from a remote port through a tunneled
connection to a destination reachable from the local machine.
"""
import getpass
import os
import socket
import select
import sys
import threading
from optparse import OptionParser
import paramiko
SSH_PORT = 22
DEFAULT_PORT = 4000
g_verbose = True
def handler(chan, host, port):
sock = socket.socket()
try:
sock.connect((host, port))
except Exception as e:
verbose("Forwarding request to %s:%d failed: %r" % (host, port, e))
return
verbose(
"Connected! Tunnel open %r -> %r -> %r"
% (chan.origin_addr, chan.getpeername(), (host, port))
)
while True:
r, w, x = select.select([sock, chan], [], [])
if sock in r:
data = sock.recv(1024)
if len(data) == 0:
break
chan.send(data)
if chan in r:
data = chan.recv(1024)
if len(data) == 0:
break
sock.send(data)
chan.close()
sock.close()
verbose("Tunnel closed from %r" % (chan.origin_addr,))
def reverse_forward_tunnel(server_port, remote_host, remote_port, transport):
transport.request_port_forward("", server_port)
while True:
chan = transport.accept(1000)
if chan is None:
continue
thr = threading.Thread(
target=handler, args=(chan, remote_host, remote_port)
)
        thr.daemon = True
thr.start()
def verbose(s):
if g_verbose:
print(s)
HELP = """\
Set up a reverse forwarding tunnel across an SSH server, using paramiko. A
port on the SSH server (given with -p) is forwarded across an SSH session
back to the local machine, and out to a remote site reachable from this
network. This is similar to the openssh -R option.
"""
def get_host_port(spec, default_port):
"parse 'hostname:22' into a host and port, with the port optional"
args = (spec.split(":", 1) + [default_port])[:2]
args[1] = int(args[1])
return args[0], args[1]
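# For example (hypothetical values):
#   get_host_port("gateway.example.com:2222", 22) -> ("gateway.example.com", 2222)
#   get_host_port("gateway.example.com", 22)      -> ("gateway.example.com", 22)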
def parse_options():
global g_verbose
parser = OptionParser(
usage="usage: %prog [options] <ssh-server>[:<server-port>]",
version="%prog 1.0",
description=HELP,
)
parser.add_option(
"-q",
"--quiet",
action="store_false",
dest="verbose",
default=True,
help="squelch all informational output",
)
parser.add_option(
"-p",
"--remote-port",
action="store",
type="int",
dest="port",
default=DEFAULT_PORT,
help="port on server to forward (default: %d)" % DEFAULT_PORT,
)
parser.add_option(
"-u",
"--user",
action="store",
type="string",
dest="user",
default=getpass.getuser(),
help="username for SSH authentication (default: %s)"
% getpass.getuser(),
)
parser.add_option(
"-K",
"--key",
action="store",
type="string",
dest="keyfile",
default=None,
help="private key file to use for SSH authentication",
)
parser.add_option(
"",
"--no-key",
action="store_false",
dest="look_for_keys",
default=True,
help="don't look for or use a private key file",
)
parser.add_option(
"-P",
"--password",
action="store_true",
dest="readpass",
default=False,
help="read password (for key or password auth) from stdin",
)
parser.add_option(
"-r",
"--remote",
action="store",
type="string",
dest="remote",
default=None,
metavar="host:port",
help="remote host and port to forward to",
)
options, args = parser.parse_args()
if len(args) != 1:
parser.error("Incorrect number of arguments.")
if options.remote is None:
parser.error("Remote address required (-r).")
g_verbose = options.verbose
server_host, server_port = get_host_port(args[0], SSH_PORT)
remote_host, remote_port = get_host_port(options.remote, SSH_PORT)
return options, (server_host, server_port), (remote_host, remote_port)
def main():
options, server, remote = parse_options()
password = None
if options.readpass:
password = getpass.getpass("Enter SSH password: ")
client = paramiko.SSHClient()
client.load_system_host_keys()
client.set_missing_host_key_policy(paramiko.WarningPolicy())
verbose("Connecting to ssh host %s:%d ..." % (server[0], server[1]))
try:
client.connect(
server[0],
server[1],
username=options.user,
key_filename=options.keyfile,
look_for_keys=options.look_for_keys,
password=password,
)
except Exception as e:
print("*** Failed to connect to %s:%d: %r" % (server[0], server[1], e))
sys.exit(1)
verbose(
"Now forwarding remote port %d to %s:%d ..."
% (options.port, remote[0], remote[1])
)
try:
reverse_forward_tunnel(
options.port, remote[0], remote[1], client.get_transport()
)
except KeyboardInterrupt:
print("C-c: Port forwarding stopped.")
sys.exit(0)
if __name__ == "__main__":
main()
| 6,367 | Python | .py | 198 | 25.641414 | 79 | 0.619684 | paramiko/paramiko | 9,017 | 1,999 | 1,074 | LGPL-2.1 | 9/5/2024, 5:07:36 PM (Europe/Amsterdam) |
586 | demo_server.py | paramiko_paramiko/demos/demo_server.py | #!/usr/bin/env python
# Copyright (C) 2003-2007 Robey Pointer <[email protected]>
#
# This file is part of paramiko.
#
# Paramiko is free software; you can redistribute it and/or modify it under the
# terms of the GNU Lesser General Public License as published by the Free
# Software Foundation; either version 2.1 of the License, or (at your option)
# any later version.
#
# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Paramiko; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
import base64
from binascii import hexlify
import os
import socket
import sys
import threading
import traceback
import paramiko
from paramiko.py3compat import b, u, decodebytes
# setup logging
paramiko.util.log_to_file("demo_server.log")
host_key = paramiko.RSAKey(filename="test_rsa.key")
# host_key = paramiko.DSSKey(filename='test_dss.key')
print("Read key: " + u(hexlify(host_key.get_fingerprint())))
class Server(paramiko.ServerInterface):
# 'data' is the output of base64.b64encode(key)
# (using the "user_rsa_key" files)
data = (
b"AAAAB3NzaC1yc2EAAAABIwAAAIEAyO4it3fHlmGZWJaGrfeHOVY7RWO3P9M7hp"
b"fAu7jJ2d7eothvfeuoRFtJwhUmZDluRdFyhFY/hFAh76PJKGAusIqIQKlkJxMC"
b"KDqIexkgHAfID/6mqvmnSJf0b5W8v5h2pI/stOSwTQ+pxVhwJ9ctYDhRSlF0iT"
b"UWT10hcuO4Ks8="
)
good_pub_key = paramiko.RSAKey(data=decodebytes(data))
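    # To regenerate 'data' from a key file (sketch; the path is
    # hypothetical):
    #
    #   user_key = paramiko.RSAKey(filename="user_rsa_key")
    #   data = base64.b64encode(user_key.asbytes())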
def __init__(self):
self.event = threading.Event()
def check_channel_request(self, kind, chanid):
if kind == "session":
return paramiko.OPEN_SUCCEEDED
return paramiko.OPEN_FAILED_ADMINISTRATIVELY_PROHIBITED
def check_auth_password(self, username, password):
if (username == "robey") and (password == "foo"):
return paramiko.AUTH_SUCCESSFUL
return paramiko.AUTH_FAILED
def check_auth_publickey(self, username, key):
print("Auth attempt with key: " + u(hexlify(key.get_fingerprint())))
if (username == "robey") and (key == self.good_pub_key):
return paramiko.AUTH_SUCCESSFUL
return paramiko.AUTH_FAILED
def check_auth_gssapi_with_mic(
self, username, gss_authenticated=paramiko.AUTH_FAILED, cc_file=None
):
"""
.. note::
We are just checking in `AuthHandler` that the given user is a
valid krb5 principal! We don't check if the krb5 principal is
allowed to log in on the server, because there is no way to do that
in python. So if you develop your own SSH server with paramiko for
a certain platform like Linux, you should call ``krb5_kuserok()`` in
your local kerberos library to make sure that the krb5_principal
has an account on the server and is allowed to log in as a user.
.. seealso::
`krb5_kuserok() man page
<http://www.unix.com/man-page/all/3/krb5_kuserok/>`_
"""
if gss_authenticated == paramiko.AUTH_SUCCESSFUL:
return paramiko.AUTH_SUCCESSFUL
return paramiko.AUTH_FAILED
def check_auth_gssapi_keyex(
self, username, gss_authenticated=paramiko.AUTH_FAILED, cc_file=None
):
if gss_authenticated == paramiko.AUTH_SUCCESSFUL:
return paramiko.AUTH_SUCCESSFUL
return paramiko.AUTH_FAILED
def enable_auth_gssapi(self):
return True
def get_allowed_auths(self, username):
return "gssapi-keyex,gssapi-with-mic,password,publickey"
def check_channel_shell_request(self, channel):
self.event.set()
return True
def check_channel_pty_request(
self, channel, term, width, height, pixelwidth, pixelheight, modes
):
return True
DoGSSAPIKeyExchange = True
# now connect
try:
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
sock.bind(("", 2200))
except Exception as e:
print("*** Bind failed: " + str(e))
traceback.print_exc()
sys.exit(1)
try:
sock.listen(100)
print("Listening for connection ...")
client, addr = sock.accept()
except Exception as e:
print("*** Listen/accept failed: " + str(e))
traceback.print_exc()
sys.exit(1)
print("Got a connection!")
try:
t = paramiko.Transport(client, gss_kex=DoGSSAPIKeyExchange)
t.set_gss_host(socket.getfqdn(""))
try:
t.load_server_moduli()
except:
print("(Failed to load moduli -- gex will be unsupported.)")
raise
t.add_server_key(host_key)
server = Server()
try:
t.start_server(server=server)
except paramiko.SSHException:
print("*** SSH negotiation failed.")
sys.exit(1)
# wait for auth
chan = t.accept(20)
if chan is None:
print("*** No channel.")
sys.exit(1)
print("Authenticated!")
server.event.wait(10)
if not server.event.is_set():
print("*** Client never asked for a shell.")
sys.exit(1)
chan.send("\r\n\r\nWelcome to my dorky little BBS!\r\n\r\n")
chan.send(
"We are on fire all the time! Hooray! Candy corn for everyone!\r\n"
)
chan.send("Happy birthday to Robot Dave!\r\n\r\n")
chan.send("Username: ")
f = chan.makefile("rU")
username = f.readline().strip("\r\n")
chan.send("\r\nI don't like you, " + username + ".\r\n")
chan.close()
except Exception as e:
print("*** Caught exception: " + str(e.__class__) + ": " + str(e))
traceback.print_exc()
try:
t.close()
except:
pass
sys.exit(1)
| 5,914 | Python | .py | 155 | 32.264516 | 80 | 0.675161 | paramiko/paramiko | 9,017 | 1,999 | 1,074 | LGPL-2.1 | 9/5/2024, 5:07:36 PM (Europe/Amsterdam) |
587 | demo_sftp.py | paramiko_paramiko/demos/demo_sftp.py | #!/usr/bin/env python
# Copyright (C) 2003-2007 Robey Pointer <[email protected]>
#
# This file is part of paramiko.
#
# Paramiko is free software; you can redistribute it and/or modify it under the
# terms of the GNU Lesser General Public License as published by the Free
# Software Foundation; either version 2.1 of the License, or (at your option)
# any later version.
#
# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Paramiko; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
# based on code provided by raymond mosteller (thanks!)
import base64
import getpass
import os
import socket
import sys
import traceback
import paramiko
from paramiko.py3compat import input
# setup logging
paramiko.util.log_to_file("demo_sftp.log")
# Paramiko client configuration
UseGSSAPI = True # enable GSS-API / SSPI authentication
DoGSSAPIKeyExchange = True
Port = 22
# get hostname
username = ""
if len(sys.argv) > 1:
hostname = sys.argv[1]
if hostname.find("@") >= 0:
username, hostname = hostname.split("@")
else:
hostname = input("Hostname: ")
if len(hostname) == 0:
print("*** Hostname required.")
sys.exit(1)
if hostname.find(":") >= 0:
hostname, portstr = hostname.split(":")
Port = int(portstr)
# get username
if username == "":
default_username = getpass.getuser()
username = input("Username [%s]: " % default_username)
if len(username) == 0:
username = default_username
if not UseGSSAPI:
password = getpass.getpass("Password for %s@%s: " % (username, hostname))
else:
password = None
# get host key, if we know one
hostkeytype = None
hostkey = None
try:
host_keys = paramiko.util.load_host_keys(
os.path.expanduser("~/.ssh/known_hosts")
)
except IOError:
try:
# try ~/ssh/ too, because windows can't have a folder named ~/.ssh/
host_keys = paramiko.util.load_host_keys(
os.path.expanduser("~/ssh/known_hosts")
)
except IOError:
print("*** Unable to open host keys file")
host_keys = {}
if hostname in host_keys:
hostkeytype = host_keys[hostname].keys()[0]
hostkey = host_keys[hostname][hostkeytype]
print("Using host key of type %s" % hostkeytype)
# now, connect and use paramiko Transport to negotiate SSH2 across the connection
try:
t = paramiko.Transport((hostname, Port))
t.connect(
hostkey,
username,
password,
gss_host=socket.getfqdn(hostname),
gss_auth=UseGSSAPI,
gss_kex=DoGSSAPIKeyExchange,
)
sftp = paramiko.SFTPClient.from_transport(t)
# dirlist on remote host
dirlist = sftp.listdir(".")
print("Dirlist: %s" % dirlist)
# copy this demo onto the server
try:
sftp.mkdir("demo_sftp_folder")
except IOError:
print("(assuming demo_sftp_folder/ already exists)")
with sftp.open("demo_sftp_folder/README", "w") as f:
f.write("This was created by demo_sftp.py.\n")
with open("demo_sftp.py", "r") as f:
data = f.read()
sftp.open("demo_sftp_folder/demo_sftp.py", "w").write(data)
print("created demo_sftp_folder/ on the server")
# copy the README back here
with sftp.open("demo_sftp_folder/README", "r") as f:
data = f.read()
with open("README_demo_sftp", "w") as f:
f.write(data)
print("copied README back here")
# BETTER: use the get() and put() methods
sftp.put("demo_sftp.py", "demo_sftp_folder/demo_sftp.py")
sftp.get("demo_sftp_folder/README", "README_demo_sftp")
t.close()
except Exception as e:
print("*** Caught exception: %s: %s" % (e.__class__, e))
traceback.print_exc()
try:
t.close()
except:
pass
sys.exit(1)
| 4,076 | Python | .py | 121 | 29.429752 | 81 | 0.681414 | paramiko/paramiko | 9,017 | 1,999 | 1,074 | LGPL-2.1 | 9/5/2024, 5:07:36 PM (Europe/Amsterdam) |
588 | demo_simple.py | paramiko_paramiko/demos/demo_simple.py | #!/usr/bin/env python
# Copyright (C) 2003-2007 Robey Pointer <[email protected]>
#
# This file is part of paramiko.
#
# Paramiko is free software; you can redistribute it and/or modify it under the
# terms of the GNU Lesser General Public License as published by the Free
# Software Foundation; either version 2.1 of the License, or (at your option)
# any later version.
#
# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Paramiko; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
import base64
import getpass
import os
import socket
import sys
import traceback
from paramiko.py3compat import input
import paramiko
try:
import interactive
except ImportError:
from . import interactive
# setup logging
paramiko.util.log_to_file("demo_simple.log")
# Paramiko client configuration
UseGSSAPI = (
paramiko.GSS_AUTH_AVAILABLE
) # enable "gssapi-with-mic" authentication, if supported by your python installation
DoGSSAPIKeyExchange = (
paramiko.GSS_AUTH_AVAILABLE
) # enable "gssapi-kex" key exchange, if supported by your python installation
# UseGSSAPI = False
# DoGSSAPIKeyExchange = False
port = 22
# get hostname
username = ""
if len(sys.argv) > 1:
hostname = sys.argv[1]
if hostname.find("@") >= 0:
username, hostname = hostname.split("@")
else:
hostname = input("Hostname: ")
if len(hostname) == 0:
print("*** Hostname required.")
sys.exit(1)
if hostname.find(":") >= 0:
hostname, portstr = hostname.split(":")
port = int(portstr)
# get username
if username == "":
default_username = getpass.getuser()
username = input("Username [%s]: " % default_username)
if len(username) == 0:
username = default_username
if not UseGSSAPI and not DoGSSAPIKeyExchange:
password = getpass.getpass("Password for %s@%s: " % (username, hostname))
# now, connect and use paramiko Client to negotiate SSH2 across the connection
try:
client = paramiko.SSHClient()
client.load_system_host_keys()
client.set_missing_host_key_policy(paramiko.WarningPolicy())
print("*** Connecting...")
if not UseGSSAPI and not DoGSSAPIKeyExchange:
client.connect(hostname, port, username, password)
else:
try:
client.connect(
hostname,
port,
username,
gss_auth=UseGSSAPI,
gss_kex=DoGSSAPIKeyExchange,
)
except Exception:
# traceback.print_exc()
password = getpass.getpass(
"Password for %s@%s: " % (username, hostname)
)
client.connect(hostname, port, username, password)
chan = client.invoke_shell()
print(repr(client.get_transport()))
print("*** Here we go!\n")
interactive.interactive_shell(chan)
chan.close()
client.close()
except Exception as e:
print("*** Caught exception: %s: %s" % (e.__class__, e))
traceback.print_exc()
try:
client.close()
except:
pass
sys.exit(1)
| 3,364 | Python | .py | 101 | 28.673267 | 86 | 0.689963 | paramiko/paramiko | 9,017 | 1,999 | 1,074 | LGPL-2.1 | 9/5/2024, 5:07:36 PM (Europe/Amsterdam) |
589 | forward.py | paramiko_paramiko/demos/forward.py | #!/usr/bin/env python
# Copyright (C) 2003-2007 Robey Pointer <[email protected]>
#
# This file is part of paramiko.
#
# Paramiko is free software; you can redistribute it and/or modify it under the
# terms of the GNU Lesser General Public License as published by the Free
# Software Foundation; either version 2.1 of the License, or (at your option)
# any later version.
#
# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Paramiko; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
"""
Sample script showing how to do local port forwarding over paramiko.
This script connects to the requested SSH server and sets up local port
forwarding (the openssh -L option) from a local port through a tunneled
connection to a destination reachable from the SSH server machine.
"""
import getpass
import os
import socket
import select
try:
import SocketServer
except ImportError:
import socketserver as SocketServer
import sys
from optparse import OptionParser
import paramiko
SSH_PORT = 22
DEFAULT_PORT = 4000
g_verbose = True
class ForwardServer(SocketServer.ThreadingTCPServer):
daemon_threads = True
allow_reuse_address = True
class Handler(SocketServer.BaseRequestHandler):
def handle(self):
try:
chan = self.ssh_transport.open_channel(
"direct-tcpip",
(self.chain_host, self.chain_port),
self.request.getpeername(),
)
except Exception as e:
verbose(
"Incoming request to %s:%d failed: %s"
% (self.chain_host, self.chain_port, repr(e))
)
return
if chan is None:
verbose(
"Incoming request to %s:%d was rejected by the SSH server."
% (self.chain_host, self.chain_port)
)
return
verbose(
"Connected! Tunnel open %r -> %r -> %r"
% (
self.request.getpeername(),
chan.getpeername(),
(self.chain_host, self.chain_port),
)
)
while True:
r, w, x = select.select([self.request, chan], [], [])
if self.request in r:
data = self.request.recv(1024)
if len(data) == 0:
break
chan.send(data)
if chan in r:
data = chan.recv(1024)
if len(data) == 0:
break
self.request.send(data)
peername = self.request.getpeername()
chan.close()
self.request.close()
verbose("Tunnel closed from %r" % (peername,))
def forward_tunnel(local_port, remote_host, remote_port, transport):
# this is a little convoluted, but lets me configure things for the Handler
# object. (SocketServer doesn't give Handlers any way to access the outer
# server normally.)
    class SubHandler(Handler):
chain_host = remote_host
chain_port = remote_port
ssh_transport = transport
ForwardServer(("", local_port), SubHander).serve_forever()
def verbose(s):
if g_verbose:
print(s)
HELP = """\
Set up a forward tunnel across an SSH server, using paramiko. A local port
(given with -p) is forwarded across an SSH session to an address:port from
the SSH server. This is similar to the openssh -L option.
"""
def get_host_port(spec, default_port):
"parse 'hostname:22' into a host and port, with the port optional"
args = (spec.split(":", 1) + [default_port])[:2]
args[1] = int(args[1])
return args[0], args[1]
def parse_options():
global g_verbose
parser = OptionParser(
usage="usage: %prog [options] <ssh-server>[:<server-port>]",
version="%prog 1.0",
description=HELP,
)
parser.add_option(
"-q",
"--quiet",
action="store_false",
dest="verbose",
default=True,
help="squelch all informational output",
)
parser.add_option(
"-p",
"--local-port",
action="store",
type="int",
dest="port",
default=DEFAULT_PORT,
help="local port to forward (default: %d)" % DEFAULT_PORT,
)
parser.add_option(
"-u",
"--user",
action="store",
type="string",
dest="user",
default=getpass.getuser(),
help="username for SSH authentication (default: %s)"
% getpass.getuser(),
)
parser.add_option(
"-K",
"--key",
action="store",
type="string",
dest="keyfile",
default=None,
help="private key file to use for SSH authentication",
)
parser.add_option(
"",
"--no-key",
action="store_false",
dest="look_for_keys",
default=True,
help="don't look for or use a private key file",
)
parser.add_option(
"-P",
"--password",
action="store_true",
dest="readpass",
default=False,
help="read password (for key or password auth) from stdin",
)
parser.add_option(
"-r",
"--remote",
action="store",
type="string",
dest="remote",
default=None,
metavar="host:port",
help="remote host and port to forward to",
)
options, args = parser.parse_args()
if len(args) != 1:
parser.error("Incorrect number of arguments.")
if options.remote is None:
parser.error("Remote address required (-r).")
g_verbose = options.verbose
server_host, server_port = get_host_port(args[0], SSH_PORT)
remote_host, remote_port = get_host_port(options.remote, SSH_PORT)
return options, (server_host, server_port), (remote_host, remote_port)
def main():
options, server, remote = parse_options()
password = None
if options.readpass:
password = getpass.getpass("Enter SSH password: ")
client = paramiko.SSHClient()
client.load_system_host_keys()
client.set_missing_host_key_policy(paramiko.WarningPolicy())
verbose("Connecting to ssh host %s:%d ..." % (server[0], server[1]))
try:
client.connect(
server[0],
server[1],
username=options.user,
key_filename=options.keyfile,
look_for_keys=options.look_for_keys,
password=password,
)
except Exception as e:
print("*** Failed to connect to %s:%d: %r" % (server[0], server[1], e))
sys.exit(1)
verbose(
"Now forwarding port %d to %s:%d ..."
% (options.port, remote[0], remote[1])
)
try:
forward_tunnel(
options.port, remote[0], remote[1], client.get_transport()
)
except KeyboardInterrupt:
print("C-c: Port forwarding stopped.")
sys.exit(0)
if __name__ == "__main__":
main()
| 7,254 | Python | .py | 219 | 25.630137 | 79 | 0.606403 | paramiko/paramiko | 9,017 | 1,999 | 1,074 | LGPL-2.1 | 9/5/2024, 5:07:36 PM (Europe/Amsterdam) |
590 | demo_keygen.py | paramiko_paramiko/demos/demo_keygen.py | #!/usr/bin/env python
# Copyright (C) 2010 Sofian Brabez <[email protected]>
#
# This file is part of paramiko.
#
# Paramiko is free software; you can redistribute it and/or modify it under the
# terms of the GNU Lesser General Public License as published by the Free
# Software Foundation; either version 2.1 of the License, or (at your option)
# any later version.
#
# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Paramiko; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
import sys
from binascii import hexlify
from optparse import OptionParser
from paramiko import DSSKey
from paramiko import RSAKey
from paramiko.ssh_exception import SSHException
from paramiko.py3compat import u
usage = """
%prog [-v] [-b bits] -t type [-N new_passphrase] [-f output_keyfile]"""
default_values = {
"ktype": "dsa",
"bits": 1024,
"filename": "output",
"comment": "",
}
key_dispatch_table = {"dsa": DSSKey, "rsa": RSAKey}
def progress(arg=None):
if not arg:
sys.stdout.write("0%\x08\x08\x08 ")
sys.stdout.flush()
elif arg[0] == "p":
sys.stdout.write("25%\x08\x08\x08\x08 ")
sys.stdout.flush()
elif arg[0] == "h":
sys.stdout.write("50%\x08\x08\x08\x08 ")
sys.stdout.flush()
elif arg[0] == "x":
sys.stdout.write("75%\x08\x08\x08\x08 ")
sys.stdout.flush()
if __name__ == "__main__":
phrase = None
pfunc = None
parser = OptionParser(usage=usage)
parser.add_option(
"-t",
"--type",
type="string",
dest="ktype",
help="Specify type of key to create (dsa or rsa)",
metavar="ktype",
default=default_values["ktype"],
)
parser.add_option(
"-b",
"--bits",
type="int",
dest="bits",
help="Number of bits in the key to create",
metavar="bits",
default=default_values["bits"],
)
parser.add_option(
"-N",
"--new-passphrase",
dest="newphrase",
help="Provide new passphrase",
metavar="phrase",
)
parser.add_option(
"-P",
"--old-passphrase",
dest="oldphrase",
help="Provide old passphrase",
metavar="phrase",
)
parser.add_option(
"-f",
"--filename",
type="string",
dest="filename",
help="Filename of the key file",
metavar="filename",
default=default_values["filename"],
)
parser.add_option(
"-q", "--quiet", default=False, action="store_false", help="Quiet"
)
parser.add_option(
"-v", "--verbose", default=False, action="store_true", help="Verbose"
)
parser.add_option(
"-C",
"--comment",
type="string",
dest="comment",
help="Provide a new comment",
metavar="comment",
default=default_values["comment"],
)
(options, args) = parser.parse_args()
if len(sys.argv) == 1:
parser.print_help()
sys.exit(0)
for o in list(default_values.keys()):
globals()[o] = getattr(options, o, default_values[o.lower()])
if options.newphrase:
phrase = getattr(options, "newphrase")
if options.verbose:
pfunc = progress
sys.stdout.write(
"Generating priv/pub %s %d bits key pair (%s/%s.pub)..."
% (ktype, bits, filename, filename)
)
sys.stdout.flush()
if ktype == "dsa" and bits > 1024:
raise SSHException("DSA Keys must be 1024 bits")
if ktype not in key_dispatch_table:
raise SSHException(
"Unknown %s algorithm to generate keys pair" % ktype
)
# generating private key
prv = key_dispatch_table[ktype].generate(bits=bits, progress_func=pfunc)
prv.write_private_key_file(filename, password=phrase)
# generating public key
pub = key_dispatch_table[ktype](filename=filename, password=phrase)
with open("%s.pub" % filename, "w") as f:
f.write("%s %s" % (pub.get_name(), pub.get_base64()))
if options.comment:
f.write(" %s" % comment)
if options.verbose:
print("done.")
hash = u(hexlify(pub.get_fingerprint()))
print(
"Fingerprint: %d %s %s.pub (%s)"
% (
bits,
":".join([hash[i : 2 + i] for i in range(0, len(hash), 2)]),
filename,
ktype.upper(),
)
)
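# Illustrative invocation (not part of the original demo):
#     python demo_keygen.py -t rsa -b 2048 -f demo_rsa
# writes demo_rsa (private key) and demo_rsa.pub (public key), then prints the
# resulting fingerprint.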
| 4,761 | Python | .py | 149 | 25.469799 | 79 | 0.604753 | paramiko/paramiko | 9,017 | 1,999 | 1,074 | LGPL-2.1 | 9/5/2024, 5:07:36 PM (Europe/Amsterdam) |
591 | client.py | paramiko_paramiko/paramiko/client.py | # Copyright (C) 2006-2007 Robey Pointer <[email protected]>
#
# This file is part of paramiko.
#
# Paramiko is free software; you can redistribute it and/or modify it under the
# terms of the GNU Lesser General Public License as published by the Free
# Software Foundation; either version 2.1 of the License, or (at your option)
# any later version.
#
# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Paramiko; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
"""
SSH client & key policies
"""
from binascii import hexlify
import getpass
import inspect
import os
import socket
import warnings
from errno import ECONNREFUSED, EHOSTUNREACH
from paramiko.agent import Agent
from paramiko.common import DEBUG
from paramiko.config import SSH_PORT
from paramiko.dsskey import DSSKey
from paramiko.ecdsakey import ECDSAKey
from paramiko.ed25519key import Ed25519Key
from paramiko.hostkeys import HostKeys
from paramiko.rsakey import RSAKey
from paramiko.ssh_exception import (
SSHException,
BadHostKeyException,
NoValidConnectionsError,
)
from paramiko.transport import Transport
from paramiko.util import ClosingContextManager
class SSHClient(ClosingContextManager):
"""
A high-level representation of a session with an SSH server. This class
wraps `.Transport`, `.Channel`, and `.SFTPClient` to take care of most
aspects of authenticating and opening channels. A typical use case is::
client = SSHClient()
client.load_system_host_keys()
client.connect('ssh.example.com')
stdin, stdout, stderr = client.exec_command('ls -l')
You may pass in explicit overrides for authentication and server host key
checking. The default mechanism is to try to use local key files or an
SSH agent (if one is running).
Instances of this class may be used as context managers.
.. versionadded:: 1.6
"""
def __init__(self):
"""
Create a new SSHClient.
"""
self._system_host_keys = HostKeys()
self._host_keys = HostKeys()
self._host_keys_filename = None
self._log_channel = None
self._policy = RejectPolicy()
self._transport = None
self._agent = None
def load_system_host_keys(self, filename=None):
"""
Load host keys from a system (read-only) file. Host keys read with
this method will not be saved back by `save_host_keys`.
This method can be called multiple times. Each new set of host keys
will be merged with the existing set (new replacing old if there are
conflicts).
If ``filename`` is left as ``None``, an attempt will be made to read
keys from the user's local "known hosts" file, as used by OpenSSH,
and no exception will be raised if the file can't be read. This is
probably only useful on posix.
:param str filename: the filename to read, or ``None``
:raises: ``IOError`` --
if a filename was provided and the file could not be read
"""
if filename is None:
# try the user's .ssh key file, and mask exceptions
filename = os.path.expanduser("~/.ssh/known_hosts")
try:
self._system_host_keys.load(filename)
except IOError:
pass
return
self._system_host_keys.load(filename)
def load_host_keys(self, filename):
"""
Load host keys from a local host-key file. Host keys read with this
method will be checked after keys loaded via `load_system_host_keys`,
but will be saved back by `save_host_keys` (so they can be modified).
The missing host key policy `.AutoAddPolicy` adds keys to this set and
saves them, when connecting to a previously-unknown server.
This method can be called multiple times. Each new set of host keys
will be merged with the existing set (new replacing old if there are
conflicts). When automatically saving, the last hostname is used.
:param str filename: the filename to read
:raises: ``IOError`` -- if the filename could not be read
"""
self._host_keys_filename = filename
self._host_keys.load(filename)
def save_host_keys(self, filename):
"""
Save the host keys back to a file. Only the host keys loaded with
`load_host_keys` (plus any added directly) will be saved -- not any
host keys loaded with `load_system_host_keys`.
:param str filename: the filename to save to
:raises: ``IOError`` -- if the file could not be written
"""
# update local host keys from file (in case other SSH clients
        # have written to the known_hosts file meanwhile).
if self._host_keys_filename is not None:
self.load_host_keys(self._host_keys_filename)
with open(filename, "w") as f:
for hostname, keys in self._host_keys.items():
for keytype, key in keys.items():
f.write(
"{} {} {}\n".format(
hostname, keytype, key.get_base64()
)
)
def get_host_keys(self):
"""
Get the local `.HostKeys` object. This can be used to examine the
local host keys or change them.
:return: the local host keys as a `.HostKeys` object.
"""
return self._host_keys
def set_log_channel(self, name):
"""
Set the channel for logging. The default is ``"paramiko.transport"``
but it can be set to anything you want.
:param str name: new channel name for logging
"""
self._log_channel = name
def set_missing_host_key_policy(self, policy):
"""
Set policy to use when connecting to servers without a known host key.
Specifically:
* A **policy** is a "policy class" (or instance thereof), namely some
subclass of `.MissingHostKeyPolicy` such as `.RejectPolicy` (the
default), `.AutoAddPolicy`, `.WarningPolicy`, or a user-created
subclass.
* A host key is **known** when it appears in the client object's cached
host keys structures (those manipulated by `load_system_host_keys`
and/or `load_host_keys`).
:param .MissingHostKeyPolicy policy:
the policy to use when receiving a host key from a
previously-unknown server
"""
if inspect.isclass(policy):
policy = policy()
self._policy = policy
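    # Example (illustrative sketch, not part of the original source): accept
    # unknown host keys automatically -- handy in throwaway test code, though
    # it weakens man-in-the-middle protection:
    #
    #     client = SSHClient()
    #     client.set_missing_host_key_policy(AutoAddPolicy())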
def _families_and_addresses(self, hostname, port):
"""
Yield pairs of address families and addresses to try for connecting.
:param str hostname: the server to connect to
:param int port: the server port to connect to
:returns: Yields an iterable of ``(family, address)`` tuples
"""
guess = True
addrinfos = socket.getaddrinfo(
hostname, port, socket.AF_UNSPEC, socket.SOCK_STREAM
)
for (family, socktype, proto, canonname, sockaddr) in addrinfos:
if socktype == socket.SOCK_STREAM:
yield family, sockaddr
guess = False
        # some OSes, like AIX, don't indicate SOCK_STREAM support, so just
# guess. :( We only do this if we did not get a single result marked
# as socktype == SOCK_STREAM.
if guess:
for family, _, _, _, sockaddr in addrinfos:
yield family, sockaddr
def connect(
self,
hostname,
port=SSH_PORT,
username=None,
password=None,
pkey=None,
key_filename=None,
timeout=None,
allow_agent=True,
look_for_keys=True,
compress=False,
sock=None,
gss_auth=False,
gss_kex=False,
gss_deleg_creds=True,
gss_host=None,
banner_timeout=None,
auth_timeout=None,
channel_timeout=None,
gss_trust_dns=True,
passphrase=None,
disabled_algorithms=None,
transport_factory=None,
auth_strategy=None,
):
"""
Connect to an SSH server and authenticate to it. The server's host key
is checked against the system host keys (see `load_system_host_keys`)
and any local host keys (`load_host_keys`). If the server's hostname
is not found in either set of host keys, the missing host key policy
is used (see `set_missing_host_key_policy`). The default policy is
to reject the key and raise an `.SSHException`.
Authentication is attempted in the following order of priority:
- The ``pkey`` or ``key_filename`` passed in (if any)
- ``key_filename`` may contain OpenSSH public certificate paths
as well as regular private-key paths; when files ending in
``-cert.pub`` are found, they are assumed to match a private
key, and both components will be loaded. (The private key
itself does *not* need to be listed in ``key_filename`` for
this to occur - *just* the certificate.)
- Any key we can find through an SSH agent
- Any "id_rsa", "id_dsa" or "id_ecdsa" key discoverable in
``~/.ssh/``
- When OpenSSH-style public certificates exist that match an
existing such private key (so e.g. one has ``id_rsa`` and
``id_rsa-cert.pub``) the certificate will be loaded alongside
the private key and used for authentication.
- Plain username/password auth, if a password was given
If a private key requires a password to unlock it, and a password is
passed in, that password will be used to attempt to unlock the key.
:param str hostname: the server to connect to
:param int port: the server port to connect to
:param str username:
the username to authenticate as (defaults to the current local
username)
:param str password:
Used for password authentication; is also used for private key
decryption if ``passphrase`` is not given.
:param str passphrase:
Used for decrypting private keys.
:param .PKey pkey: an optional private key to use for authentication
:param str key_filename:
the filename, or list of filenames, of optional private key(s)
and/or certs to try for authentication
:param float timeout:
an optional timeout (in seconds) for the TCP connect
:param bool allow_agent:
set to False to disable connecting to the SSH agent
:param bool look_for_keys:
set to False to disable searching for discoverable private key
files in ``~/.ssh/``
:param bool compress: set to True to turn on compression
:param socket sock:
an open socket or socket-like object (such as a `.Channel`) to use
for communication to the target host
:param bool gss_auth:
``True`` if you want to use GSS-API authentication
:param bool gss_kex:
Perform GSS-API Key Exchange and user authentication
:param bool gss_deleg_creds: Delegate GSS-API client credentials or not
:param str gss_host:
            The target's name in the Kerberos database. Default: hostname
:param bool gss_trust_dns:
Indicates whether or not the DNS is trusted to securely
canonicalize the name of the host being connected to (default
``True``).
:param float banner_timeout: an optional timeout (in seconds) to wait
for the SSH banner to be presented.
:param float auth_timeout: an optional timeout (in seconds) to wait for
an authentication response.
:param float channel_timeout: an optional timeout (in seconds) to wait
for a channel open response.
:param dict disabled_algorithms:
an optional dict passed directly to `.Transport` and its keyword
argument of the same name.
:param transport_factory:
an optional callable which is handed a subset of the constructor
arguments (primarily those related to the socket, GSS
functionality, and algorithm selection) and generates a
`.Transport` instance to be used by this client. Defaults to
`.Transport.__init__`.
:param auth_strategy:
an optional instance of `.AuthStrategy`, triggering use of this
newer authentication mechanism instead of SSHClient's legacy auth
method.
.. warning::
This parameter is **incompatible** with all other
authentication-related parameters (such as, but not limited to,
``password``, ``key_filename`` and ``allow_agent``) and will
trigger an exception if given alongside them.
:returns:
`.AuthResult` if ``auth_strategy`` is non-``None``; otherwise,
returns ``None``.
:raises BadHostKeyException:
if the server's host key could not be verified.
:raises AuthenticationException:
if authentication failed.
:raises UnableToAuthenticate:
if authentication failed (when ``auth_strategy`` is non-``None``;
and note that this is a subclass of ``AuthenticationException``).
:raises socket.error:
if a socket error (other than connection-refused or
host-unreachable) occurred while connecting.
:raises NoValidConnectionsError:
            if all valid connection targets for the requested hostname (e.g. IPv4
and IPv6) yielded connection-refused or host-unreachable socket
errors.
:raises SSHException:
if there was any other error connecting or establishing an SSH
session.
.. versionchanged:: 1.15
Added the ``banner_timeout``, ``gss_auth``, ``gss_kex``,
``gss_deleg_creds`` and ``gss_host`` arguments.
.. versionchanged:: 2.3
Added the ``gss_trust_dns`` argument.
.. versionchanged:: 2.4
Added the ``passphrase`` argument.
.. versionchanged:: 2.6
Added the ``disabled_algorithms`` argument.
.. versionchanged:: 2.12
Added the ``transport_factory`` argument.
.. versionchanged:: 3.2
Added the ``auth_strategy`` argument.
"""
if not sock:
errors = {}
# Try multiple possible address families (e.g. IPv4 vs IPv6)
to_try = list(self._families_and_addresses(hostname, port))
for af, addr in to_try:
try:
sock = socket.socket(af, socket.SOCK_STREAM)
if timeout is not None:
try:
sock.settimeout(timeout)
except:
pass
sock.connect(addr)
# Break out of the loop on success
break
except socket.error as e:
# As mentioned in socket docs it is better
# to close sockets explicitly
if sock:
sock.close()
# Raise anything that isn't a straight up connection error
# (such as a resolution error)
if e.errno not in (ECONNREFUSED, EHOSTUNREACH):
raise
# Capture anything else so we know how the run looks once
# iteration is complete. Retain info about which attempt
# this was.
errors[addr] = e
# Make sure we explode usefully if no address family attempts
# succeeded. We've no way of knowing which error is the "right"
# one, so we construct a hybrid exception containing all the real
# ones, of a subclass that client code should still be watching for
# (socket.error)
if len(errors) == len(to_try):
raise NoValidConnectionsError(errors)
if transport_factory is None:
transport_factory = Transport
t = self._transport = transport_factory(
sock,
gss_kex=gss_kex,
gss_deleg_creds=gss_deleg_creds,
disabled_algorithms=disabled_algorithms,
)
t.use_compression(compress=compress)
t.set_gss_host(
# t.hostname may be None, but GSS-API requires a target name.
# Therefore use hostname as fallback.
gss_host=gss_host or hostname,
trust_dns=gss_trust_dns,
gssapi_requested=gss_auth or gss_kex,
)
if self._log_channel is not None:
t.set_log_channel(self._log_channel)
if banner_timeout is not None:
t.banner_timeout = banner_timeout
if auth_timeout is not None:
t.auth_timeout = auth_timeout
if channel_timeout is not None:
t.channel_timeout = channel_timeout
if port == SSH_PORT:
server_hostkey_name = hostname
else:
server_hostkey_name = "[{}]:{}".format(hostname, port)
        our_server_keys = self._system_host_keys.get(server_hostkey_name)
if our_server_keys is None:
our_server_keys = self._host_keys.get(server_hostkey_name)
if our_server_keys is not None:
keytype = our_server_keys.keys()[0]
sec_opts = t.get_security_options()
other_types = [x for x in sec_opts.key_types if x != keytype]
sec_opts.key_types = [keytype] + other_types
t.start_client(timeout=timeout)
# If GSS-API Key Exchange is performed we are not required to check the
# host key, because the host is authenticated via GSS-API / SSPI as
# well as our client.
if not self._transport.gss_kex_used:
server_key = t.get_remote_server_key()
if our_server_keys is None:
# will raise exception if the key is rejected
self._policy.missing_host_key(
self, server_hostkey_name, server_key
)
else:
our_key = our_server_keys.get(server_key.get_name())
if our_key != server_key:
if our_key is None:
our_key = list(our_server_keys.values())[0]
raise BadHostKeyException(hostname, server_key, our_key)
if username is None:
username = getpass.getuser()
# New auth flow!
if auth_strategy is not None:
return auth_strategy.authenticate(transport=t)
# Old auth flow!
if key_filename is None:
key_filenames = []
elif isinstance(key_filename, str):
key_filenames = [key_filename]
else:
key_filenames = key_filename
self._auth(
username,
password,
pkey,
key_filenames,
allow_agent,
look_for_keys,
gss_auth,
gss_kex,
gss_deleg_creds,
t.gss_host,
passphrase,
)
def close(self):
"""
Close this SSHClient and its underlying `.Transport`.
This should be called anytime you are done using the client object.
.. warning::
Paramiko registers garbage collection hooks that will try to
automatically close connections for you, but this is not presently
reliable. Failure to explicitly close your client after use may
lead to end-of-process hangs!
"""
if self._transport is None:
return
self._transport.close()
self._transport = None
if self._agent is not None:
self._agent.close()
self._agent = None
def exec_command(
self,
command,
bufsize=-1,
timeout=None,
get_pty=False,
environment=None,
):
"""
Execute a command on the SSH server. A new `.Channel` is opened and
the requested command is executed. The command's input and output
streams are returned as Python ``file``-like objects representing
stdin, stdout, and stderr.
:param str command: the command to execute
:param int bufsize:
            interpreted the same way as by the built-in ``open()`` function in
Python
:param int timeout:
set command's channel timeout. See `.Channel.settimeout`
:param bool get_pty:
Request a pseudo-terminal from the server (default ``False``).
See `.Channel.get_pty`
:param dict environment:
a dict of shell environment variables, to be merged into the
default environment that the remote command executes within.
.. warning::
Servers may silently reject some environment variables; see the
warning in `.Channel.set_environment_variable` for details.
:return:
the stdin, stdout, and stderr of the executing command, as a
3-tuple
:raises: `.SSHException` -- if the server fails to execute the command
.. versionchanged:: 1.10
Added the ``get_pty`` kwarg.
"""
chan = self._transport.open_session(timeout=timeout)
if get_pty:
chan.get_pty()
chan.settimeout(timeout)
if environment:
chan.update_environment(environment)
chan.exec_command(command)
stdin = chan.makefile_stdin("wb", bufsize)
stdout = chan.makefile("r", bufsize)
stderr = chan.makefile_stderr("r", bufsize)
return stdin, stdout, stderr
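    # Example (illustrative; assumes an already-connected client):
    #
    #     stdin, stdout, stderr = client.exec_command("uname -a")
    #     output = stdout.read().decode()
    #     status = stdout.channel.recv_exit_status()  # blocks until exit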
def invoke_shell(
self,
term="vt100",
width=80,
height=24,
width_pixels=0,
height_pixels=0,
environment=None,
):
"""
Start an interactive shell session on the SSH server. A new `.Channel`
is opened and connected to a pseudo-terminal using the requested
terminal type and size.
:param str term:
the terminal type to emulate (for example, ``"vt100"``)
:param int width: the width (in characters) of the terminal window
:param int height: the height (in characters) of the terminal window
:param int width_pixels: the width (in pixels) of the terminal window
:param int height_pixels: the height (in pixels) of the terminal window
:param dict environment: the command's environment
:return: a new `.Channel` connected to the remote shell
:raises: `.SSHException` -- if the server fails to invoke a shell
"""
chan = self._transport.open_session()
chan.get_pty(term, width, height, width_pixels, height_pixels)
chan.invoke_shell()
return chan
def open_sftp(self):
"""
Open an SFTP session on the SSH server.
:return: a new `.SFTPClient` session object
"""
return self._transport.open_sftp_client()
def get_transport(self):
"""
Return the underlying `.Transport` object for this SSH connection.
This can be used to perform lower-level tasks, like opening specific
kinds of channels.
:return: the `.Transport` for this connection
"""
return self._transport
def _key_from_filepath(self, filename, klass, password):
"""
Attempt to derive a `.PKey` from given string path ``filename``:
- If ``filename`` appears to be a cert, the matching private key is
loaded.
- Otherwise, the filename is assumed to be a private key, and the
matching public cert will be loaded if it exists.
"""
cert_suffix = "-cert.pub"
# Assume privkey, not cert, by default
if filename.endswith(cert_suffix):
key_path = filename[: -len(cert_suffix)]
cert_path = filename
else:
key_path = filename
cert_path = filename + cert_suffix
# Blindly try the key path; if no private key, nothing will work.
key = klass.from_private_key_file(key_path, password)
# TODO: change this to 'Loading' instead of 'Trying' sometime; probably
# when #387 is released, since this is a critical log message users are
# likely testing/filtering for (bah.)
msg = "Trying discovered key {} in {}".format(
hexlify(key.get_fingerprint()), key_path
)
self._log(DEBUG, msg)
# Attempt to load cert if it exists.
if os.path.isfile(cert_path):
key.load_certificate(cert_path)
self._log(DEBUG, "Adding public certificate {}".format(cert_path))
return key
def _auth(
self,
username,
password,
pkey,
key_filenames,
allow_agent,
look_for_keys,
gss_auth,
gss_kex,
gss_deleg_creds,
gss_host,
passphrase,
):
"""
Try, in order:
- The key(s) passed in, if one was passed in.
- Any key we can find through an SSH agent (if allowed).
- Any "id_rsa", "id_dsa" or "id_ecdsa" key discoverable in ~/.ssh/
(if allowed).
- Plain username/password auth, if a password was given.
(The password might be needed to unlock a private key [if 'passphrase'
isn't also given], or for two-factor authentication [for which it is
required].)
"""
saved_exception = None
two_factor = False
allowed_types = set()
two_factor_types = {"keyboard-interactive", "password"}
if passphrase is None and password is not None:
passphrase = password
        # If GSS-API key exchange was performed, we attempt
# authentication with gssapi-keyex.
if gss_kex and self._transport.gss_kex_used:
try:
self._transport.auth_gssapi_keyex(username)
return
except Exception as e:
saved_exception = e
# Try GSS-API authentication (gssapi-with-mic) only if GSS-API Key
# Exchange is not performed, because if we use GSS-API for the key
# exchange, there is already a fully established GSS-API context, so
# why should we do that again?
if gss_auth:
try:
return self._transport.auth_gssapi_with_mic(
username, gss_host, gss_deleg_creds
)
except Exception as e:
saved_exception = e
if pkey is not None:
try:
self._log(
DEBUG,
"Trying SSH key {}".format(
hexlify(pkey.get_fingerprint())
),
)
allowed_types = set(
self._transport.auth_publickey(username, pkey)
)
two_factor = allowed_types & two_factor_types
if not two_factor:
return
except SSHException as e:
saved_exception = e
if not two_factor:
for key_filename in key_filenames:
# TODO 4.0: leverage PKey.from_path() if we don't end up just
# killing SSHClient entirely
for pkey_class in (RSAKey, DSSKey, ECDSAKey, Ed25519Key):
try:
key = self._key_from_filepath(
key_filename, pkey_class, passphrase
)
allowed_types = set(
self._transport.auth_publickey(username, key)
)
two_factor = allowed_types & two_factor_types
if not two_factor:
return
break
except SSHException as e:
saved_exception = e
if not two_factor and allow_agent:
if self._agent is None:
self._agent = Agent()
for key in self._agent.get_keys():
try:
id_ = hexlify(key.get_fingerprint())
self._log(DEBUG, "Trying SSH agent key {}".format(id_))
                    # for 2-factor auth, a successfully auth'd key will return
                    # the list of allowed 2-factor auth methods
allowed_types = set(
self._transport.auth_publickey(username, key)
)
two_factor = allowed_types & two_factor_types
if not two_factor:
return
break
except SSHException as e:
saved_exception = e
if not two_factor:
keyfiles = []
for keytype, name in [
(RSAKey, "rsa"),
(DSSKey, "dsa"),
(ECDSAKey, "ecdsa"),
(Ed25519Key, "ed25519"),
]:
# ~/ssh/ is for windows
for directory in [".ssh", "ssh"]:
full_path = os.path.expanduser(
"~/{}/id_{}".format(directory, name)
)
if os.path.isfile(full_path):
# TODO: only do this append if below did not run
keyfiles.append((keytype, full_path))
if os.path.isfile(full_path + "-cert.pub"):
keyfiles.append((keytype, full_path + "-cert.pub"))
if not look_for_keys:
keyfiles = []
for pkey_class, filename in keyfiles:
try:
key = self._key_from_filepath(
filename, pkey_class, passphrase
)
# for 2-factor auth a successfully auth'd key will result
# in ['password']
allowed_types = set(
self._transport.auth_publickey(username, key)
)
two_factor = allowed_types & two_factor_types
if not two_factor:
return
break
except (SSHException, IOError) as e:
saved_exception = e
if password is not None:
try:
self._transport.auth_password(username, password)
return
except SSHException as e:
saved_exception = e
elif two_factor:
try:
self._transport.auth_interactive_dumb(username)
return
except SSHException as e:
saved_exception = e
# if we got an auth-failed exception earlier, re-raise it
if saved_exception is not None:
raise saved_exception
raise SSHException("No authentication methods available")
def _log(self, level, msg):
self._transport._log(level, msg)
class MissingHostKeyPolicy:
"""
Interface for defining the policy that `.SSHClient` should use when the
SSH server's hostname is not in either the system host keys or the
application's keys. Pre-made classes implement policies for automatically
adding the key to the application's `.HostKeys` object (`.AutoAddPolicy`),
and for automatically rejecting the key (`.RejectPolicy`).
    A subclass may, for example, prompt the user to verify the key.
"""
def missing_host_key(self, client, hostname, key):
"""
Called when an `.SSHClient` receives a server key for a server that
isn't in either the system or local `.HostKeys` object. To accept
        the key, simply return. To reject, raise an exception (which will
be passed to the calling application).
"""
pass
class AutoAddPolicy(MissingHostKeyPolicy):
"""
Policy for automatically adding the hostname and new host key to the
local `.HostKeys` object, and saving it. This is used by `.SSHClient`.
"""
def missing_host_key(self, client, hostname, key):
client._host_keys.add(hostname, key.get_name(), key)
if client._host_keys_filename is not None:
client.save_host_keys(client._host_keys_filename)
client._log(
DEBUG,
"Adding {} host key for {}: {}".format(
key.get_name(), hostname, hexlify(key.get_fingerprint())
),
)
class RejectPolicy(MissingHostKeyPolicy):
"""
Policy for automatically rejecting the unknown hostname & key. This is
used by `.SSHClient`.
"""
def missing_host_key(self, client, hostname, key):
client._log(
DEBUG,
"Rejecting {} host key for {}: {}".format(
key.get_name(), hostname, hexlify(key.get_fingerprint())
),
)
raise SSHException(
"Server {!r} not found in known_hosts".format(hostname)
)
class WarningPolicy(MissingHostKeyPolicy):
"""
Policy for logging a Python-style warning for an unknown host key, but
accepting it. This is used by `.SSHClient`.
"""
def missing_host_key(self, client, hostname, key):
warnings.warn(
"Unknown {} host key for {}: {}".format(
key.get_name(), hostname, hexlify(key.get_fingerprint())
)
)
| 34,492 | Python | .py | 787 | 32.330368 | 79 | 0.588887 | paramiko/paramiko | 9,017 | 1,999 | 1,074 | LGPL-2.1 | 9/5/2024, 5:07:36 PM (Europe/Amsterdam) |
592 | compress.py | paramiko_paramiko/paramiko/compress.py | # Copyright (C) 2003-2007 Robey Pointer <[email protected]>
#
# This file is part of paramiko.
#
# Paramiko is free software; you can redistribute it and/or modify it under the
# terms of the GNU Lesser General Public License as published by the Free
# Software Foundation; either version 2.1 of the License, or (at your option)
# any later version.
#
# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Paramiko; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
"""
Compression implementations for a Transport.
"""
import zlib
class ZlibCompressor:
def __init__(self):
# Use the default level of zlib compression
self.z = zlib.compressobj()
def __call__(self, data):
return self.z.compress(data) + self.z.flush(zlib.Z_FULL_FLUSH)
class ZlibDecompressor:
def __init__(self):
self.z = zlib.decompressobj()
def __call__(self, data):
return self.z.decompress(data)
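# Round-trip sketch (illustrative, not part of the original module):
#
#     comp, decomp = ZlibCompressor(), ZlibDecompressor()
#     frame = comp(b"payload")
#     assert decomp(frame) == b"payload"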
| 1,282 | Python | .py | 32 | 37.0625 | 79 | 0.738325 | paramiko/paramiko | 9,017 | 1,999 | 1,074 | LGPL-2.1 | 9/5/2024, 5:07:36 PM (Europe/Amsterdam) |
593 | sftp.py | paramiko_paramiko/paramiko/sftp.py | # Copyright (C) 2003-2007 Robey Pointer <[email protected]>
#
# This file is part of paramiko.
#
# Paramiko is free software; you can redistribute it and/or modify it under the
# terms of the GNU Lesser General Public License as published by the Free
# Software Foundation; either version 2.1 of the License, or (at your option)
# any later version.
#
# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Paramiko; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
import select
import socket
import struct
from paramiko import util
from paramiko.common import DEBUG, byte_chr, byte_ord
from paramiko.message import Message
(
CMD_INIT,
CMD_VERSION,
CMD_OPEN,
CMD_CLOSE,
CMD_READ,
CMD_WRITE,
CMD_LSTAT,
CMD_FSTAT,
CMD_SETSTAT,
CMD_FSETSTAT,
CMD_OPENDIR,
CMD_READDIR,
CMD_REMOVE,
CMD_MKDIR,
CMD_RMDIR,
CMD_REALPATH,
CMD_STAT,
CMD_RENAME,
CMD_READLINK,
CMD_SYMLINK,
) = range(1, 21)
(CMD_STATUS, CMD_HANDLE, CMD_DATA, CMD_NAME, CMD_ATTRS) = range(101, 106)
(CMD_EXTENDED, CMD_EXTENDED_REPLY) = range(200, 202)
SFTP_OK = 0
(
SFTP_EOF,
SFTP_NO_SUCH_FILE,
SFTP_PERMISSION_DENIED,
SFTP_FAILURE,
SFTP_BAD_MESSAGE,
SFTP_NO_CONNECTION,
SFTP_CONNECTION_LOST,
SFTP_OP_UNSUPPORTED,
) = range(1, 9)
SFTP_DESC = [
"Success",
"End of file",
"No such file",
"Permission denied",
"Failure",
"Bad message",
"No connection",
"Connection lost",
"Operation unsupported",
]
SFTP_FLAG_READ = 0x1
SFTP_FLAG_WRITE = 0x2
SFTP_FLAG_APPEND = 0x4
SFTP_FLAG_CREATE = 0x8
SFTP_FLAG_TRUNC = 0x10
SFTP_FLAG_EXCL = 0x20
_VERSION = 3
# for debugging
CMD_NAMES = {
CMD_INIT: "init",
CMD_VERSION: "version",
CMD_OPEN: "open",
CMD_CLOSE: "close",
CMD_READ: "read",
CMD_WRITE: "write",
CMD_LSTAT: "lstat",
CMD_FSTAT: "fstat",
CMD_SETSTAT: "setstat",
CMD_FSETSTAT: "fsetstat",
CMD_OPENDIR: "opendir",
CMD_READDIR: "readdir",
CMD_REMOVE: "remove",
CMD_MKDIR: "mkdir",
CMD_RMDIR: "rmdir",
CMD_REALPATH: "realpath",
CMD_STAT: "stat",
CMD_RENAME: "rename",
CMD_READLINK: "readlink",
CMD_SYMLINK: "symlink",
CMD_STATUS: "status",
CMD_HANDLE: "handle",
CMD_DATA: "data",
CMD_NAME: "name",
CMD_ATTRS: "attrs",
CMD_EXTENDED: "extended",
CMD_EXTENDED_REPLY: "extended_reply",
}
# TODO: rewrite SFTP file/server modules' overly-flexible "make a request with
# xyz components" so we don't need this very silly method of signaling whether
# a given Python integer should be 32- or 64-bit.
# NOTE: this only became an issue when dropping Python 2 support; prior to
# doing so, we had to support actual-longs, which served as that signal. This
# is simply recreating that structure in a more tightly scoped fashion.
class int64(int):
pass
class SFTPError(Exception):
pass
class BaseSFTP:
def __init__(self):
self.logger = util.get_logger("paramiko.sftp")
self.sock = None
self.ultra_debug = False
# ...internals...
def _send_version(self):
m = Message()
m.add_int(_VERSION)
self._send_packet(CMD_INIT, m)
t, data = self._read_packet()
if t != CMD_VERSION:
raise SFTPError("Incompatible sftp protocol")
version = struct.unpack(">I", data[:4])[0]
# if version != _VERSION:
# raise SFTPError('Incompatible sftp protocol')
return version
def _send_server_version(self):
# winscp will freak out if the server sends version info before the
# client finishes sending INIT.
t, data = self._read_packet()
if t != CMD_INIT:
raise SFTPError("Incompatible sftp protocol")
version = struct.unpack(">I", data[:4])[0]
# advertise that we support "check-file"
extension_pairs = ["check-file", "md5,sha1"]
msg = Message()
msg.add_int(_VERSION)
msg.add(*extension_pairs)
self._send_packet(CMD_VERSION, msg)
return version
def _log(self, level, msg, *args):
self.logger.log(level, msg, *args)
def _write_all(self, out):
while len(out) > 0:
n = self.sock.send(out)
if n <= 0:
raise EOFError()
if n == len(out):
return
out = out[n:]
return
def _read_all(self, n):
out = bytes()
while n > 0:
if isinstance(self.sock, socket.socket):
# sometimes sftp is used directly over a socket instead of
# through a paramiko channel. in this case, check periodically
# if the socket is closed. (for some reason, recv() won't ever
# return or raise an exception, but calling select on a closed
# socket will.)
while True:
read, write, err = select.select([self.sock], [], [], 0.1)
if len(read) > 0:
x = self.sock.recv(n)
break
else:
x = self.sock.recv(n)
if len(x) == 0:
raise EOFError()
out += x
n -= len(x)
return out
def _send_packet(self, t, packet):
packet = packet.asbytes()
out = struct.pack(">I", len(packet) + 1) + byte_chr(t) + packet
if self.ultra_debug:
self._log(DEBUG, util.format_binary(out, "OUT: "))
self._write_all(out)
def _read_packet(self):
x = self._read_all(4)
# most sftp servers won't accept packets larger than about 32k, so
# anything with the high byte set (> 16MB) is just garbage.
if byte_ord(x[0]):
raise SFTPError("Garbage packet received")
size = struct.unpack(">I", x)[0]
data = self._read_all(size)
if self.ultra_debug:
self._log(DEBUG, util.format_binary(data, "IN: "))
if size > 0:
t = byte_ord(data[0])
return t, data[1:]
return 0, bytes()
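# Wire-format sketch (illustrative): every packet is a 4-byte big-endian
# length (covering the type byte plus payload), a 1-byte type code, then the
# payload, mirroring _send_packet above:
#
#     payload = Message().add_int(_VERSION).asbytes()
#     frame = struct.pack(">I", len(payload) + 1) + byte_chr(CMD_INIT) + payload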
| 6,471 | Python | .py | 199 | 25.944724 | 79 | 0.613735 | paramiko/paramiko | 9,017 | 1,999 | 1,074 | LGPL-2.1 | 9/5/2024, 5:07:36 PM (Europe/Amsterdam) |
594 | message.py | paramiko_paramiko/paramiko/message.py | # Copyright (C) 2003-2007 Robey Pointer <[email protected]>
#
# This file is part of paramiko.
#
# Paramiko is free software; you can redistribute it and/or modify it under the
# terms of the GNU Lesser General Public License as published by the Free
# Software Foundation; either version 2.1 of the License, or (at your option)
# any later version.
#
# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Paramiko; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
"""
Implementation of an SSH2 "message".
"""
import struct
from io import BytesIO
from paramiko import util
from paramiko.common import zero_byte, max_byte, one_byte
from paramiko.util import u
class Message:
"""
An SSH2 message is a stream of bytes that encodes some combination of
strings, integers, bools, and infinite-precision integers. This class
builds or breaks down such a byte stream.
Normally you don't need to deal with anything this low-level, but it's
exposed for people implementing custom extensions, or features that
paramiko doesn't support yet.
"""
big_int = 0xFF000000
def __init__(self, content=None):
"""
Create a new SSH2 message.
:param bytes content:
the byte stream to use as the message content (passed in only when
decomposing a message).
"""
if content is not None:
self.packet = BytesIO(content)
else:
self.packet = BytesIO()
def __bytes__(self):
return self.asbytes()
def __repr__(self):
"""
Returns a string representation of this object, for debugging.
"""
return "paramiko.Message(" + repr(self.packet.getvalue()) + ")"
# TODO 4.0: just merge into __bytes__ (everywhere)
def asbytes(self):
"""
Return the byte stream content of this Message, as a `bytes`.
"""
return self.packet.getvalue()
def rewind(self):
"""
Rewind the message to the beginning as if no items had been parsed
out of it yet.
"""
self.packet.seek(0)
def get_remainder(self):
"""
Return the `bytes` of this message that haven't already been parsed and
returned.
"""
position = self.packet.tell()
remainder = self.packet.read()
self.packet.seek(position)
return remainder
def get_so_far(self):
"""
Returns the `bytes` of this message that have been parsed and
returned. The string passed into a message's constructor can be
regenerated by concatenating ``get_so_far`` and `get_remainder`.
"""
position = self.packet.tell()
self.rewind()
return self.packet.read(position)
def get_bytes(self, n):
"""
Return the next ``n`` bytes of the message, without decomposing into an
int, decoded string, etc. Just the raw bytes are returned. Returns a
string of ``n`` zero bytes if there weren't ``n`` bytes remaining in
the message.
"""
b = self.packet.read(n)
max_pad_size = 1 << 20 # Limit padding to 1 MB
if len(b) < n < max_pad_size:
return b + zero_byte * (n - len(b))
return b
def get_byte(self):
"""
Return the next byte of the message, without decomposing it. This
is equivalent to `get_bytes(1) <get_bytes>`.
:return:
the next (`bytes`) byte of the message, or ``b'\000'`` if there
aren't any bytes remaining.
"""
return self.get_bytes(1)
def get_boolean(self):
"""
Fetch a boolean from the stream.
"""
b = self.get_bytes(1)
return b != zero_byte
def get_adaptive_int(self):
"""
        Fetch an int from the stream, decoded adaptively.
        :return: an `int` -- a plain 32-bit unsigned value, or an
            arbitrary-precision integer when the leading byte is ``0xFF``.
"""
byte = self.get_bytes(1)
if byte == max_byte:
return util.inflate_long(self.get_binary())
byte += self.get_bytes(3)
return struct.unpack(">I", byte)[0]
def get_int(self):
"""
Fetch an int from the stream.
"""
return struct.unpack(">I", self.get_bytes(4))[0]
def get_int64(self):
"""
Fetch a 64-bit int from the stream.
:return: a 64-bit unsigned integer (`int`).
"""
return struct.unpack(">Q", self.get_bytes(8))[0]
def get_mpint(self):
"""
Fetch a long int (mpint) from the stream.
:return: an arbitrary-length integer (`int`).
"""
return util.inflate_long(self.get_binary())
# TODO 4.0: depending on where this is used internally or downstream, force
# users to specify get_binary instead and delete this.
def get_string(self):
"""
Fetch a "string" from the stream. This will actually be a `bytes`
object, and may contain unprintable characters. (It's not unheard of
for a string to contain another byte-stream message.)
"""
return self.get_bytes(self.get_int())
# TODO 4.0: also consider having this take over the get_string name, and
# remove this name instead.
def get_text(self):
"""
Fetch a Unicode string from the stream.
        This currently operates by attempting to decode the next "string" as
``utf-8``.
"""
return u(self.get_string())
def get_binary(self):
"""
Alias for `get_string` (obtains a bytestring).
"""
return self.get_bytes(self.get_int())
def get_list(self):
"""
Fetch a list of `strings <str>` from the stream.
These are trivially encoded as comma-separated values in a string.
"""
return self.get_text().split(",")
def add_bytes(self, b):
"""
Write bytes to the stream, without any formatting.
:param bytes b: bytes to add
"""
self.packet.write(b)
return self
def add_byte(self, b):
"""
Write a single byte to the stream, without any formatting.
:param bytes b: byte to add
"""
self.packet.write(b)
return self
def add_boolean(self, b):
"""
Add a boolean value to the stream.
:param bool b: boolean value to add
"""
if b:
self.packet.write(one_byte)
else:
self.packet.write(zero_byte)
return self
def add_int(self, n):
"""
Add an integer to the stream.
:param int n: integer to add
"""
self.packet.write(struct.pack(">I", n))
return self
def add_adaptive_int(self, n):
"""
        Add an integer to the stream, switching to the ``0xFF``-prefixed
        mpint encoding for values that don't fit in 32 bits.
:param int n: integer to add
"""
if n >= Message.big_int:
self.packet.write(max_byte)
self.add_string(util.deflate_long(n))
else:
self.packet.write(struct.pack(">I", n))
return self
def add_int64(self, n):
"""
Add a 64-bit int to the stream.
:param int n: long int to add
"""
self.packet.write(struct.pack(">Q", n))
return self
def add_mpint(self, z):
"""
Add a long int to the stream, encoded as an infinite-precision
integer. This method only works on positive numbers.
:param int z: long int to add
"""
self.add_string(util.deflate_long(z))
return self
# TODO: see the TODO for get_string/get_text/et al, this should change
# to match.
def add_string(self, s):
"""
Add a bytestring to the stream.
        :param bytes s: bytestring to add
"""
s = util.asbytes(s)
self.add_int(len(s))
self.packet.write(s)
return self
def add_list(self, l): # noqa: E741
"""
Add a list of strings to the stream. They are encoded identically to
a single string of values separated by commas. (Yes, really, that's
how SSH2 does it.)
:param l: list of strings to add
"""
self.add_string(",".join(l))
return self
def _add(self, i):
if type(i) is bool:
return self.add_boolean(i)
elif isinstance(i, int):
return self.add_adaptive_int(i)
elif type(i) is list:
return self.add_list(i)
else:
return self.add_string(i)
# TODO: this would never have worked for unicode strings under Python 3,
# guessing nobody/nothing ever used it for that purpose?
def add(self, *seq):
"""
Add a sequence of items to the stream. The values are encoded based
on their type: bytes, str, int, bool, or list.
.. warning::
Longs are encoded non-deterministically. Don't use this method.
:param seq: the sequence of items
"""
for item in seq:
self._add(item)
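# Round-trip sketch (illustrative, not part of the original module):
#
#     m = Message()
#     m.add_byte(b"\x05").add_int(42).add_string("hello")
#     m.rewind()
#     assert m.get_byte() == b"\x05"
#     assert m.get_int() == 42
#     assert m.get_text() == "hello"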
| 9,349 | Python | .py | 264 | 27.420455 | 79 | 0.600266 | paramiko/paramiko | 9,017 | 1,999 | 1,074 | LGPL-2.1 | 9/5/2024, 5:07:36 PM (Europe/Amsterdam) |
595 | proxy.py | paramiko_paramiko/paramiko/proxy.py | # Copyright (C) 2012 Yipit, Inc <[email protected]>
#
# This file is part of paramiko.
#
# Paramiko is free software; you can redistribute it and/or modify it under the
# terms of the GNU Lesser General Public License as published by the Free
# Software Foundation; either version 2.1 of the License, or (at your option)
# any later version.
#
# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Paramiko; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
import os
import shlex
import signal
from select import select
import socket
import time
# Try-and-ignore import so platforms w/o subprocess (eg Google App Engine) can
# still import paramiko.
subprocess, subprocess_import_error = None, None
try:
import subprocess
except ImportError as e:
subprocess_import_error = e
from paramiko.ssh_exception import ProxyCommandFailure
from paramiko.util import ClosingContextManager
class ProxyCommand(ClosingContextManager):
"""
Wraps a subprocess running ProxyCommand-driven programs.
    This class implements the socket-like interface needed by the
`.Transport` and `.Packetizer` classes. Using this class instead of a
regular socket makes it possible to talk with a Popen'd command that will
proxy traffic between the client and a server hosted in another machine.
Instances of this class may be used as context managers.
"""
def __init__(self, command_line):
"""
        Create a new ProxyCommand instance. The instance created by this
class can be passed as an argument to the `.Transport` class.
:param str command_line:
the command that should be executed and used as the proxy.
"""
if subprocess is None:
raise subprocess_import_error
self.cmd = shlex.split(command_line)
self.process = subprocess.Popen(
self.cmd,
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
bufsize=0,
)
self.timeout = None
def send(self, content):
"""
Write the content received from the SSH client to the standard
input of the forked command.
        :param bytes content: the bytes to send to the forked command
"""
try:
self.process.stdin.write(content)
except IOError as e:
# There was a problem with the child process. It probably
# died and we can't proceed. The best option here is to
            # raise an exception informing the user that the configured
# ProxyCommand is not working.
raise ProxyCommandFailure(" ".join(self.cmd), e.strerror)
return len(content)
def recv(self, size):
"""
Read from the standard output of the forked program.
        :param int size: how many bytes should be read
:return: the string of bytes read, which may be shorter than requested
"""
try:
buffer = b""
start = time.time()
while len(buffer) < size:
select_timeout = None
if self.timeout is not None:
elapsed = time.time() - start
if elapsed >= self.timeout:
raise socket.timeout()
select_timeout = self.timeout - elapsed
r, w, x = select([self.process.stdout], [], [], select_timeout)
if r and r[0] == self.process.stdout:
buffer += os.read(
self.process.stdout.fileno(), size - len(buffer)
)
return buffer
except socket.timeout:
if buffer:
# Don't raise socket.timeout, return partial result instead
return buffer
raise # socket.timeout is a subclass of IOError
except IOError as e:
raise ProxyCommandFailure(" ".join(self.cmd), e.strerror)
def close(self):
os.kill(self.process.pid, signal.SIGTERM)
@property
def closed(self):
return self.process.returncode is not None
@property
def _closed(self):
# Concession to Python 3 socket-like API
return self.closed
def settimeout(self, timeout):
self.timeout = timeout
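# Usage sketch (illustrative host names, not part of the original module):
#
#     sock = ProxyCommand("ssh -W target.example.com:22 jump.example.com")
#     client = paramiko.SSHClient()
#     client.load_system_host_keys()
#     client.connect("target.example.com", sock=sock)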
| 4,648 | Python | .py | 114 | 32.508772 | 79 | 0.651307 | paramiko/paramiko | 9,017 | 1,999 | 1,074 | LGPL-2.1 | 9/5/2024, 5:07:36 PM (Europe/Amsterdam) |
596 | util.py | paramiko_paramiko/paramiko/util.py | # Copyright (C) 2003-2007 Robey Pointer <[email protected]>
#
# This file is part of paramiko.
#
# Paramiko is free software; you can redistribute it and/or modify it under the
# terms of the GNU Lesser General Public License as published by the Free
# Software Foundation; either version 2.1 of the License, or (at your option)
# any later version.
#
# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Paramiko; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
"""
Useful functions used by the rest of paramiko.
"""
import sys
import struct
import traceback
import threading
import logging
from paramiko.common import (
DEBUG,
zero_byte,
xffffffff,
max_byte,
byte_ord,
byte_chr,
)
from paramiko.config import SSHConfig
def inflate_long(s, always_positive=False):
"""turns a normalized byte string into a long-int
(adapted from Crypto.Util.number)"""
out = 0
negative = 0
if not always_positive and (len(s) > 0) and (byte_ord(s[0]) >= 0x80):
negative = 1
if len(s) % 4:
filler = zero_byte
if negative:
filler = max_byte
# never convert this to ``s +=`` because this is a string, not a number
# noinspection PyAugmentAssignment
s = filler * (4 - len(s) % 4) + s
for i in range(0, len(s), 4):
out = (out << 32) + struct.unpack(">I", s[i : i + 4])[0]
if negative:
out -= 1 << (8 * len(s))
return out
def deflate_long(n, add_sign_padding=True):
"""turns a long-int into a normalized byte string
(adapted from Crypto.Util.number)"""
# after much testing, this algorithm was deemed to be the fastest
s = bytes()
n = int(n)
while (n != 0) and (n != -1):
s = struct.pack(">I", n & xffffffff) + s
n >>= 32
# strip off leading zeros, FFs
for i in enumerate(s):
if (n == 0) and (i[1] != 0):
break
if (n == -1) and (i[1] != 0xFF):
break
else:
# degenerate case, n was either 0 or -1
i = (0,)
if n == 0:
s = zero_byte
else:
s = max_byte
s = s[i[0] :]
if add_sign_padding:
if (n == 0) and (byte_ord(s[0]) >= 0x80):
s = zero_byte + s
if (n == -1) and (byte_ord(s[0]) < 0x80):
s = max_byte + s
return s
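# Round-trip sketch (illustrative):
#
#     assert inflate_long(deflate_long(0x123456789ABC)) == 0x123456789ABC
#     assert deflate_long(-1) == max_byte  # degenerate case: one 0xFF byte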
def format_binary(data, prefix=""):
x = 0
out = []
while len(data) > x + 16:
out.append(format_binary_line(data[x : x + 16]))
x += 16
if x < len(data):
out.append(format_binary_line(data[x:]))
return [prefix + line for line in out]
def format_binary_line(data):
left = " ".join(["{:02X}".format(byte_ord(c)) for c in data])
right = "".join(
[".{:c}..".format(byte_ord(c))[(byte_ord(c) + 63) // 95] for c in data]
)
return "{:50s} {}".format(left, right)
def safe_string(s):
out = b""
for c in s:
i = byte_ord(c)
if 32 <= i <= 127:
out += byte_chr(i)
else:
out += b("%{:02X}".format(i))
return out
def bit_length(n):
try:
return n.bit_length()
except AttributeError:
norm = deflate_long(n, False)
hbyte = byte_ord(norm[0])
if hbyte == 0:
return 1
bitlen = len(norm) * 8
while not (hbyte & 0x80):
hbyte <<= 1
bitlen -= 1
return bitlen
def tb_strings():
return "".join(traceback.format_exception(*sys.exc_info())).split("\n")
def generate_key_bytes(hash_alg, salt, key, nbytes):
"""
Given a password, passphrase, or other human-source key, scramble it
through a secure hash into some keyworthy bytes. This specific algorithm
is used for encrypting/decrypting private key files.
:param function hash_alg: A function which creates a new hash object, such
as ``hashlib.sha256``.
    :param bytes salt: data to salt the hash with (only the first 8 bytes
        are used).
:param str key: human-entered password or passphrase.
:param int nbytes: number of bytes to generate.
:return: Key data, as `bytes`.
"""
keydata = bytes()
digest = bytes()
if len(salt) > 8:
salt = salt[:8]
while nbytes > 0:
hash_obj = hash_alg()
if len(digest) > 0:
hash_obj.update(digest)
hash_obj.update(b(key))
hash_obj.update(salt)
digest = hash_obj.digest()
size = min(nbytes, len(digest))
keydata += digest[:size]
nbytes -= size
return keydata
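# Sketch (illustrative): derive 32 key bytes from a passphrase the way legacy
# PEM-encrypted private keys do, with MD5 and an 8-byte salt:
#
#     import hashlib
#     key = generate_key_bytes(hashlib.md5, b"\x00" * 8, "passphrase", 32)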
def load_host_keys(filename):
"""
Read a file of known SSH host keys, in the format used by openssh, and
return a compound dict of ``hostname -> keytype ->`` `PKey
<paramiko.pkey.PKey>`. The hostname may be an IP address or DNS name. The
    keytype will be a key type name such as ``"ssh-rsa"`` or ``"ssh-ed25519"``.
This type of file unfortunately doesn't exist on Windows, but on posix,
it will usually be stored in ``os.path.expanduser("~/.ssh/known_hosts")``.
Since 1.5.3, this is just a wrapper around `.HostKeys`.
:param str filename: name of the file to read host keys from
:return:
nested dict of `.PKey` objects, indexed by hostname and then keytype
"""
from paramiko.hostkeys import HostKeys
return HostKeys(filename)
def parse_ssh_config(file_obj):
"""
Provided only as a backward-compatible wrapper around `.SSHConfig`.
.. deprecated:: 2.7
Use `SSHConfig.from_file` instead.
"""
config = SSHConfig()
config.parse(file_obj)
return config
def lookup_ssh_host_config(hostname, config):
"""
Provided only as a backward-compatible wrapper around `.SSHConfig`.
"""
return config.lookup(hostname)
def mod_inverse(x, m):
# it's crazy how small Python can make this function.
u1, u2, u3 = 1, 0, m
v1, v2, v3 = 0, 1, x
while v3 > 0:
q = u3 // v3
u1, v1 = v1, u1 - v1 * q
u2, v2 = v2, u2 - v2 * q
u3, v3 = v3, u3 - v3 * q
if u2 < 0:
u2 += m
return u2
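# e.g. mod_inverse(3, 7) == 5, since (3 * 5) % 7 == 1.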
_g_thread_data = threading.local()
_g_thread_counter = 0
_g_thread_lock = threading.Lock()
def get_thread_id():
global _g_thread_data, _g_thread_counter, _g_thread_lock
try:
return _g_thread_data.id
except AttributeError:
with _g_thread_lock:
_g_thread_counter += 1
_g_thread_data.id = _g_thread_counter
return _g_thread_data.id
def log_to_file(filename, level=DEBUG):
"""send paramiko logs to a logfile,
if they're not already going somewhere"""
logger = logging.getLogger("paramiko")
if len(logger.handlers) > 0:
return
logger.setLevel(level)
f = open(filename, "a")
handler = logging.StreamHandler(f)
frm = "%(levelname)-.3s [%(asctime)s.%(msecs)03d] thr=%(_threadid)-3d"
frm += " %(name)s: %(message)s"
handler.setFormatter(logging.Formatter(frm, "%Y%m%d-%H:%M:%S"))
logger.addHandler(handler)
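# Example (illustrative): call log_to_file("paramiko.log") once, before any
# connection is opened, to capture transport-level DEBUG logging.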
# make only one filter object, so it doesn't get applied more than once
class PFilter:
def filter(self, record):
record._threadid = get_thread_id()
return True
_pfilter = PFilter()
def get_logger(name):
logger = logging.getLogger(name)
logger.addFilter(_pfilter)
return logger
def constant_time_bytes_eq(a, b):
if len(a) != len(b):
return False
res = 0
# noinspection PyUnresolvedReferences
for i in range(len(a)): # noqa: F821
res |= byte_ord(a[i]) ^ byte_ord(b[i])
return res == 0
class ClosingContextManager:
def __enter__(self):
return self
def __exit__(self, type, value, traceback):
self.close()
def clamp_value(minimum, val, maximum):
return max(minimum, min(val, maximum))
def asbytes(s):
"""
Coerce to bytes if possible or return unchanged.
"""
try:
# Attempt to run through our version of b(), which does the Right Thing
# for unicode strings vs bytestrings, and raises TypeError if it's not
# one of those types.
return b(s)
except TypeError:
try:
# If it wasn't a string/byte/buffer-ish object, try calling an
# asbytes() method, which many of our internal classes implement.
return s.asbytes()
except AttributeError:
# Finally, just do nothing & assume this object is sufficiently
# byte-y or buffer-y that everything will work out (or that callers
# are capable of handling whatever it is.)
return s
# TODO: clean this up / force callers to assume bytes OR unicode
def b(s, encoding="utf8"):
"""cast unicode or bytes to bytes"""
if isinstance(s, bytes):
return s
elif isinstance(s, str):
return s.encode(encoding)
else:
raise TypeError(f"Expected unicode or bytes, got {type(s)}")
# TODO: clean this up / force callers to assume bytes OR unicode
def u(s, encoding="utf8"):
"""cast bytes or unicode to unicode"""
if isinstance(s, bytes):
return s.decode(encoding)
elif isinstance(s, str):
return s
else:
raise TypeError(f"Expected unicode or bytes, got {type(s)}")
| 9,550 | Python | .py | 277 | 28.422383 | 79 | 0.621296 | paramiko/paramiko | 9,017 | 1,999 | 1,074 | LGPL-2.1 | 9/5/2024, 5:07:36 PM (Europe/Amsterdam) |
597 | channel.py | paramiko_paramiko/paramiko/channel.py | # Copyright (C) 2003-2007 Robey Pointer <[email protected]>
#
# This file is part of paramiko.
#
# Paramiko is free software; you can redistribute it and/or modify it under the
# terms of the GNU Lesser General Public License as published by the Free
# Software Foundation; either version 2.1 of the License, or (at your option)
# any later version.
#
# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Paramiko; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
"""
Abstraction for an SSH2 channel.
"""
import binascii
import os
import socket
import time
import threading
from functools import wraps
from paramiko import util
from paramiko.common import (
cMSG_CHANNEL_REQUEST,
cMSG_CHANNEL_WINDOW_ADJUST,
cMSG_CHANNEL_DATA,
cMSG_CHANNEL_EXTENDED_DATA,
DEBUG,
ERROR,
cMSG_CHANNEL_SUCCESS,
cMSG_CHANNEL_FAILURE,
cMSG_CHANNEL_EOF,
cMSG_CHANNEL_CLOSE,
)
from paramiko.message import Message
from paramiko.ssh_exception import SSHException
from paramiko.file import BufferedFile
from paramiko.buffered_pipe import BufferedPipe, PipeTimeout
from paramiko import pipe
from paramiko.util import ClosingContextManager
def open_only(func):
"""
Decorator for `.Channel` methods which performs an openness check.
:raises:
`.SSHException` -- If the wrapped method is called on an unopened
`.Channel`.
"""
@wraps(func)
def _check(self, *args, **kwds):
if (
self.closed
or self.eof_received
or self.eof_sent
or not self.active
):
raise SSHException("Channel is not open")
return func(self, *args, **kwds)
return _check
class Channel(ClosingContextManager):
"""
A secure tunnel across an SSH `.Transport`. A Channel is meant to behave
like a socket, and has an API that should be indistinguishable from the
Python socket API.
Because SSH2 has a windowing kind of flow control, if you stop reading data
from a Channel and its buffer fills up, the server will be unable to send
you any more data until you read some of it. (This won't affect other
channels on the same transport -- all channels on a single transport are
flow-controlled independently.) Similarly, if the server isn't reading
data you send, calls to `send` may block, unless you set a timeout. This
is exactly like a normal network socket, so it shouldn't be too surprising.
Instances of this class may be used as context managers.
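    A minimal client-side sketch (assumes an authenticated `.Transport`
    named ``transport``; the command and buffer size are illustrative)::
        with transport.open_session() as chan:
            chan.exec_command("uptime")
            output = chan.recv(1024)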
"""
def __init__(self, chanid):
"""
Create a new channel. The channel is not associated with any
particular session or `.Transport` until the Transport attaches it.
Normally you would only call this method from the constructor of a
subclass of `.Channel`.
:param int chanid:
the ID of this channel, as passed by an existing `.Transport`.
"""
#: Channel ID
self.chanid = chanid
#: Remote channel ID
self.remote_chanid = 0
#: `.Transport` managing this channel
self.transport = None
#: Whether the connection is presently active
self.active = False
self.eof_received = 0
self.eof_sent = 0
self.in_buffer = BufferedPipe()
self.in_stderr_buffer = BufferedPipe()
self.timeout = None
#: Whether the connection has been closed
self.closed = False
self.ultra_debug = False
self.lock = threading.Lock()
self.out_buffer_cv = threading.Condition(self.lock)
self.in_window_size = 0
self.out_window_size = 0
self.in_max_packet_size = 0
self.out_max_packet_size = 0
self.in_window_threshold = 0
self.in_window_sofar = 0
self.status_event = threading.Event()
self._name = str(chanid)
self.logger = util.get_logger("paramiko.transport")
self._pipe = None
self.event = threading.Event()
self.event_ready = False
self.combine_stderr = False
self.exit_status = -1
self.origin_addr = None
def __del__(self):
try:
self.close()
except:
pass
def __repr__(self):
"""
Return a string representation of this object, for debugging.
"""
out = "<paramiko.Channel {}".format(self.chanid)
if self.closed:
out += " (closed)"
elif self.active:
if self.eof_received:
out += " (EOF received)"
if self.eof_sent:
out += " (EOF sent)"
out += " (open) window={}".format(self.out_window_size)
if len(self.in_buffer) > 0:
out += " in-buffer={}".format(len(self.in_buffer))
out += " -> " + repr(self.transport)
out += ">"
return out
@open_only
def get_pty(
self,
term="vt100",
width=80,
height=24,
width_pixels=0,
height_pixels=0,
):
"""
Request a pseudo-terminal from the server. This is usually used right
after creating a client channel, to ask the server to provide some
basic terminal semantics for a shell invoked with `invoke_shell`.
It isn't necessary (or desirable) to call this method if you're going
to execute a single command with `exec_command`.
:param str term: the terminal type to emulate
(for example, ``'vt100'``)
:param int width: width (in characters) of the terminal screen
:param int height: height (in characters) of the terminal screen
:param int width_pixels: width (in pixels) of the terminal screen
:param int height_pixels: height (in pixels) of the terminal screen
:raises:
`.SSHException` -- if the request was rejected or the channel was
closed
"""
m = Message()
m.add_byte(cMSG_CHANNEL_REQUEST)
m.add_int(self.remote_chanid)
m.add_string("pty-req")
m.add_boolean(True)
m.add_string(term)
m.add_int(width)
m.add_int(height)
m.add_int(width_pixels)
m.add_int(height_pixels)
m.add_string(bytes())
self._event_pending()
self.transport._send_user_message(m)
self._wait_for_event()
@open_only
def invoke_shell(self):
"""
Request an interactive shell session on this channel. If the server
allows it, the channel will then be directly connected to the stdin,
stdout, and stderr of the shell.
Normally you would call `get_pty` before this, in which case the
shell will operate through the pty, and the channel will be connected
to the stdin and stdout of the pty.
When the shell exits, the channel will be closed and can't be reused.
You must open a new channel if you wish to open another shell.
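        For example (``transport`` is assumed to be an authenticated
        `.Transport`)::
            chan = transport.open_session()
            chan.get_pty()
            chan.invoke_shell()
            chan.send(b"ls\\n")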
:raises:
`.SSHException` -- if the request was rejected or the channel was
closed
"""
m = Message()
m.add_byte(cMSG_CHANNEL_REQUEST)
m.add_int(self.remote_chanid)
m.add_string("shell")
m.add_boolean(True)
self._event_pending()
self.transport._send_user_message(m)
self._wait_for_event()
@open_only
def exec_command(self, command):
"""
Execute a command on the server. If the server allows it, the channel
will then be directly connected to the stdin, stdout, and stderr of
the command being executed.
When the command finishes executing, the channel will be closed and
can't be reused. You must open a new channel if you wish to execute
another command.
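        For example (``transport`` illustrative, as in `invoke_shell`)::
            chan = transport.open_session()
            chan.exec_command("ls /")
            listing = chan.recv(4096)
            status = chan.recv_exit_status()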
:param str command: a shell command to execute.
:raises:
`.SSHException` -- if the request was rejected or the channel was
closed
"""
m = Message()
m.add_byte(cMSG_CHANNEL_REQUEST)
m.add_int(self.remote_chanid)
m.add_string("exec")
m.add_boolean(True)
m.add_string(command)
self._event_pending()
self.transport._send_user_message(m)
self._wait_for_event()
@open_only
def invoke_subsystem(self, subsystem):
"""
Request a subsystem on the server (for example, ``sftp``). If the
server allows it, the channel will then be directly connected to the
requested subsystem.
When the subsystem finishes, the channel will be closed and can't be
reused.
:param str subsystem: name of the subsystem being requested.
:raises:
`.SSHException` -- if the request was rejected or the channel was
closed
"""
m = Message()
m.add_byte(cMSG_CHANNEL_REQUEST)
m.add_int(self.remote_chanid)
m.add_string("subsystem")
m.add_boolean(True)
m.add_string(subsystem)
self._event_pending()
self.transport._send_user_message(m)
self._wait_for_event()
@open_only
def resize_pty(self, width=80, height=24, width_pixels=0, height_pixels=0):
"""
Resize the pseudo-terminal. This can be used to change the width and
height of the terminal emulation created in a previous `get_pty` call.
:param int width: new width (in characters) of the terminal screen
:param int height: new height (in characters) of the terminal screen
:param int width_pixels: new width (in pixels) of the terminal screen
:param int height_pixels: new height (in pixels) of the terminal screen
:raises:
`.SSHException` -- if the request was rejected or the channel was
closed
"""
m = Message()
m.add_byte(cMSG_CHANNEL_REQUEST)
m.add_int(self.remote_chanid)
m.add_string("window-change")
m.add_boolean(False)
m.add_int(width)
m.add_int(height)
m.add_int(width_pixels)
m.add_int(height_pixels)
self.transport._send_user_message(m)
@open_only
def update_environment(self, environment):
"""
Updates this channel's remote shell environment.
.. note::
This operation is additive - i.e. the current environment is not
reset before the given environment variables are set.
.. warning::
Servers may silently reject some environment variables; see the
warning in `set_environment_variable` for details.
:param dict environment:
            a dictionary mapping environment variable names to their values
:raises:
`.SSHException` -- if any of the environment variables was rejected
by the server or the channel was closed
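        For example (``chan`` assumed to be an open session channel)::
            chan.update_environment({"LANG": "en_US.UTF-8"})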
"""
for name, value in environment.items():
try:
self.set_environment_variable(name, value)
except SSHException as e:
err = 'Failed to set environment variable "{}".'
raise SSHException(err.format(name), e)
@open_only
def set_environment_variable(self, name, value):
"""
Set the value of an environment variable.
.. warning::
The server may reject this request depending on its ``AcceptEnv``
setting; such rejections will fail silently (which is common client
practice for this particular request type). Make sure you
understand your server's configuration before using!
:param str name: name of the environment variable
:param str value: value of the environment variable
:raises:
`.SSHException` -- if the request was rejected or the channel was
closed
"""
m = Message()
m.add_byte(cMSG_CHANNEL_REQUEST)
m.add_int(self.remote_chanid)
m.add_string("env")
m.add_boolean(False)
m.add_string(name)
m.add_string(value)
self.transport._send_user_message(m)
def exit_status_ready(self):
"""
Return true if the remote process has exited and returned an exit
status. You may use this to poll the process status if you don't
want to block in `recv_exit_status`. Note that the server may not
        return an exit status in some cases (e.g., misbehaving servers).
:return:
``True`` if `recv_exit_status` will return immediately, else
``False``.
.. versionadded:: 1.7.3
"""
return self.closed or self.status_event.is_set()
def recv_exit_status(self):
"""
Return the exit status from the process on the server. This is
mostly useful for retrieving the results of an `exec_command`.
If the command hasn't finished yet, this method will wait until
it does, or until the channel is closed. If no exit status is
provided by the server, -1 is returned.
.. warning::
In some situations, receiving remote output larger than the current
`.Transport` or session's ``window_size`` (e.g. that set by the
``default_window_size`` kwarg for `.Transport.__init__`) will cause
`.recv_exit_status` to hang indefinitely if it is called prior to a
sufficiently large `.Channel.recv` (or if there are no threads
calling `.Channel.recv` in the background).
In these cases, ensuring that `.recv_exit_status` is called *after*
`.Channel.recv` (or, again, using threads) can avoid the hang.
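            One pattern that avoids the hang (names illustrative) is to drain
            output before asking for the status::
                stdout = b""
                while True:
                    data = chan.recv(32768)
                    if not data:
                        break
                    stdout += data
                status = chan.recv_exit_status()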
:return: the exit code (as an `int`) of the process on the server.
.. versionadded:: 1.2
"""
self.status_event.wait()
assert self.status_event.is_set()
return self.exit_status
def send_exit_status(self, status):
"""
Send the exit status of an executed command to the client. (This
really only makes sense in server mode.) Many clients expect to
get some sort of status code back from an executed command after
it completes.
:param int status: the exit code of the process
.. versionadded:: 1.2
"""
# in many cases, the channel will not still be open here.
# that's fine.
m = Message()
m.add_byte(cMSG_CHANNEL_REQUEST)
m.add_int(self.remote_chanid)
m.add_string("exit-status")
m.add_boolean(False)
m.add_int(status)
self.transport._send_user_message(m)
@open_only
def request_x11(
self,
screen_number=0,
auth_protocol=None,
auth_cookie=None,
single_connection=False,
handler=None,
):
"""
Request an x11 session on this channel. If the server allows it,
further x11 requests can be made from the server to the client,
when an x11 application is run in a shell session.
From :rfc:`4254`::
It is RECOMMENDED that the 'x11 authentication cookie' that is
sent be a fake, random cookie, and that the cookie be checked and
replaced by the real cookie when a connection request is received.
If you omit the auth_cookie, a new secure random 128-bit value will be
generated, used, and returned. You will need to use this value to
verify incoming x11 requests and replace them with the actual local
x11 cookie (which requires some knowledge of the x11 protocol).
If a handler is passed in, the handler is called from another thread
whenever a new x11 connection arrives. The default handler queues up
incoming x11 connections, which may be retrieved using
`.Transport.accept`. The handler's calling signature is::
handler(channel: Channel, (address: str, port: int))
:param int screen_number: the x11 screen number (0, 10, etc.)
:param str auth_protocol:
the name of the X11 authentication method used; if none is given,
``"MIT-MAGIC-COOKIE-1"`` is used
:param str auth_cookie:
hexadecimal string containing the x11 auth cookie; if none is
given, a secure random 128-bit value is generated
:param bool single_connection:
if True, only a single x11 connection will be forwarded (by
default, any number of x11 connections can arrive over this
session)
:param handler:
an optional callable handler to use for incoming X11 connections
:return: the auth_cookie used
"""
if auth_protocol is None:
auth_protocol = "MIT-MAGIC-COOKIE-1"
if auth_cookie is None:
auth_cookie = binascii.hexlify(os.urandom(16))
m = Message()
m.add_byte(cMSG_CHANNEL_REQUEST)
m.add_int(self.remote_chanid)
m.add_string("x11-req")
m.add_boolean(True)
m.add_boolean(single_connection)
m.add_string(auth_protocol)
m.add_string(auth_cookie)
m.add_int(screen_number)
self._event_pending()
self.transport._send_user_message(m)
self._wait_for_event()
self.transport._set_x11_handler(handler)
return auth_cookie
@open_only
def request_forward_agent(self, handler):
"""
        Request SSH agent forwarding on this channel.
        This is only valid for an OpenSSH-compatible ssh-agent.
:param handler:
a required callable handler to use for incoming SSH Agent
connections
        :return: ``True`` if the request was sent, else ``False``
            (currently this method always returns ``True``)
:raises: SSHException in case of channel problem.
"""
m = Message()
m.add_byte(cMSG_CHANNEL_REQUEST)
m.add_int(self.remote_chanid)
m.add_string("[email protected]")
m.add_boolean(False)
self.transport._send_user_message(m)
self.transport._set_forward_agent_handler(handler)
return True
def get_transport(self):
"""
Return the `.Transport` associated with this channel.
"""
return self.transport
def set_name(self, name):
"""
Set a name for this channel. Currently it's only used to set the name
of the channel in logfile entries. The name can be fetched with the
`get_name` method.
:param str name: new channel name
"""
self._name = name
def get_name(self):
"""
Get the name of this channel that was previously set by `set_name`.
"""
return self._name
def get_id(self):
"""
Return the `int` ID # for this channel.
The channel ID is unique across a `.Transport` and usually a small
number. It's also the number passed to
`.ServerInterface.check_channel_request` when determining whether to
accept a channel request in server mode.
"""
return self.chanid
def set_combine_stderr(self, combine):
"""
Set whether stderr should be combined into stdout on this channel.
The default is ``False``, but in some cases it may be convenient to
have both streams combined.
If this is ``False``, and `exec_command` is called (or ``invoke_shell``
with no pty), output to stderr will not show up through the `recv`
and `recv_ready` calls. You will have to use `recv_stderr` and
`recv_stderr_ready` to get stderr output.
If this is ``True``, data will never show up via `recv_stderr` or
`recv_stderr_ready`.
:param bool combine:
``True`` if stderr output should be combined into stdout on this
channel.
:return: the previous setting (a `bool`).
.. versionadded:: 1.1
"""
data = bytes()
self.lock.acquire()
try:
old = self.combine_stderr
self.combine_stderr = combine
if combine and not old:
# copy old stderr buffer into primary buffer
data = self.in_stderr_buffer.empty()
finally:
self.lock.release()
if len(data) > 0:
self._feed(data)
return old
# ...socket API...
def settimeout(self, timeout):
"""
Set a timeout on blocking read/write operations. The ``timeout``
argument can be a nonnegative float expressing seconds, or ``None``.
If a float is given, subsequent channel read/write operations will
raise a timeout exception if the timeout period value has elapsed
before the operation has completed. Setting a timeout of ``None``
disables timeouts on socket operations.
``chan.settimeout(0.0)`` is equivalent to ``chan.setblocking(0)``;
``chan.settimeout(None)`` is equivalent to ``chan.setblocking(1)``.
:param float timeout:
seconds to wait for a pending read/write operation before raising
``socket.timeout``, or ``None`` for no timeout.
"""
self.timeout = timeout
def gettimeout(self):
"""
Returns the timeout in seconds (as a float) associated with socket
operations, or ``None`` if no timeout is set. This reflects the last
call to `setblocking` or `settimeout`.
"""
return self.timeout
def setblocking(self, blocking):
"""
Set blocking or non-blocking mode of the channel: if ``blocking`` is 0,
the channel is set to non-blocking mode; otherwise it's set to blocking
mode. Initially all channels are in blocking mode.
In non-blocking mode, if a `recv` call doesn't find any data, or if a
`send` call can't immediately dispose of the data, an error exception
is raised. In blocking mode, the calls block until they can proceed. An
EOF condition is considered "immediate data" for `recv`, so if the
channel is closed in the read direction, it will never block.
``chan.setblocking(0)`` is equivalent to ``chan.settimeout(0)``;
``chan.setblocking(1)`` is equivalent to ``chan.settimeout(None)``.
:param int blocking:
0 to set non-blocking mode; non-0 to set blocking mode.
"""
if blocking:
self.settimeout(None)
else:
self.settimeout(0.0)
def getpeername(self):
"""
Return the address of the remote side of this Channel, if possible.
This simply wraps `.Transport.getpeername`, used to provide enough of a
socket-like interface to allow asyncore to work. (asyncore likes to
call ``'getpeername'``.)
"""
return self.transport.getpeername()
def close(self):
"""
Close the channel. All future read/write operations on the channel
will fail. The remote end will receive no more data (after queued data
is flushed). Channels are automatically closed when their `.Transport`
is closed or when they are garbage collected.
"""
self.lock.acquire()
try:
# only close the pipe when the user explicitly closes the channel.
# otherwise they will get unpleasant surprises. (and do it before
# checking self.closed, since the remote host may have already
# closed the connection.)
if self._pipe is not None:
self._pipe.close()
self._pipe = None
if not self.active or self.closed:
return
msgs = self._close_internal()
finally:
self.lock.release()
for m in msgs:
if m is not None:
self.transport._send_user_message(m)
def recv_ready(self):
"""
Returns true if data is buffered and ready to be read from this
channel. A ``False`` result does not mean that the channel has closed;
it means you may need to wait before more data arrives.
:return:
``True`` if a `recv` call on this channel would immediately return
at least one byte; ``False`` otherwise.
"""
return self.in_buffer.read_ready()
def recv(self, nbytes):
"""
Receive data from the channel. The return value is a string
representing the data received. The maximum amount of data to be
received at once is specified by ``nbytes``. If a string of
length zero is returned, the channel stream has closed.
:param int nbytes: maximum number of bytes to read.
:return: received data, as a `bytes`.
:raises socket.timeout:
if no data is ready before the timeout set by `settimeout`.
"""
try:
out = self.in_buffer.read(nbytes, self.timeout)
except PipeTimeout:
raise socket.timeout()
ack = self._check_add_window(len(out))
# no need to hold the channel lock when sending this
if ack > 0:
m = Message()
m.add_byte(cMSG_CHANNEL_WINDOW_ADJUST)
m.add_int(self.remote_chanid)
m.add_int(ack)
self.transport._send_user_message(m)
return out
def recv_stderr_ready(self):
"""
Returns true if data is buffered and ready to be read from this
channel's stderr stream. Only channels using `exec_command` or
`invoke_shell` without a pty will ever have data on the stderr
stream.
:return:
``True`` if a `recv_stderr` call on this channel would immediately
return at least one byte; ``False`` otherwise.
.. versionadded:: 1.1
"""
return self.in_stderr_buffer.read_ready()
def recv_stderr(self, nbytes):
"""
Receive data from the channel's stderr stream. Only channels using
`exec_command` or `invoke_shell` without a pty will ever have data
on the stderr stream. The return value is a string representing the
data received. The maximum amount of data to be received at once is
specified by ``nbytes``. If a string of length zero is returned, the
channel stream has closed.
:param int nbytes: maximum number of bytes to read.
:return: received data as a `bytes`
:raises socket.timeout: if no data is ready before the timeout set by
`settimeout`.
.. versionadded:: 1.1
"""
try:
out = self.in_stderr_buffer.read(nbytes, self.timeout)
except PipeTimeout:
raise socket.timeout()
ack = self._check_add_window(len(out))
# no need to hold the channel lock when sending this
if ack > 0:
m = Message()
m.add_byte(cMSG_CHANNEL_WINDOW_ADJUST)
m.add_int(self.remote_chanid)
m.add_int(ack)
self.transport._send_user_message(m)
return out
def send_ready(self):
"""
Returns true if data can be written to this channel without blocking.
This means the channel is either closed (so any write attempt would
return immediately) or there is at least one byte of space in the
        outbound buffer; in the latter case, a `send` call will succeed
        immediately and return the number of bytes actually written.
:return:
``True`` if a `send` call on this channel would immediately succeed
or fail
"""
self.lock.acquire()
try:
if self.closed or self.eof_sent:
return True
return self.out_window_size > 0
finally:
self.lock.release()
def send(self, s):
"""
Send data to the channel. Returns the number of bytes sent, or 0 if
the channel stream is closed. Applications are responsible for
checking that all data has been sent: if only some of the data was
transmitted, the application needs to attempt delivery of the remaining
data.
:param bytes s: data to send
:return: number of bytes actually sent, as an `int`
:raises socket.timeout: if no data could be sent before the timeout set
by `settimeout`.
"""
m = Message()
m.add_byte(cMSG_CHANNEL_DATA)
m.add_int(self.remote_chanid)
return self._send(s, m)
def send_stderr(self, s):
"""
Send data to the channel on the "stderr" stream. This is normally
only used by servers to send output from shell commands -- clients
won't use this. Returns the number of bytes sent, or 0 if the channel
stream is closed. Applications are responsible for checking that all
data has been sent: if only some of the data was transmitted, the
application needs to attempt delivery of the remaining data.
:param bytes s: data to send.
:return: number of bytes actually sent, as an `int`.
:raises socket.timeout:
if no data could be sent before the timeout set by `settimeout`.
.. versionadded:: 1.1
"""
m = Message()
m.add_byte(cMSG_CHANNEL_EXTENDED_DATA)
m.add_int(self.remote_chanid)
m.add_int(1)
return self._send(s, m)
def sendall(self, s):
"""
Send data to the channel, without allowing partial results. Unlike
`send`, this method continues to send data from the given string until
either all data has been sent or an error occurs. Nothing is returned.
:param bytes s: data to send.
:raises socket.timeout:
if sending stalled for longer than the timeout set by `settimeout`.
:raises socket.error:
if an error occurred before the entire string was sent.
.. note::
If the channel is closed while only part of the data has been
sent, there is no way to determine how much data (if any) was sent.
            This is irritating, but it exactly mirrors Python's socket API.
"""
while s:
sent = self.send(s)
s = s[sent:]
return None
def sendall_stderr(self, s):
"""
Send data to the channel's "stderr" stream, without allowing partial
results. Unlike `send_stderr`, this method continues to send data
from the given bytestring until all data has been sent or an error
occurs. Nothing is returned.
:param bytes s: data to send to the client as "stderr" output.
:raises socket.timeout:
if sending stalled for longer than the timeout set by `settimeout`.
:raises socket.error:
if an error occurred before the entire string was sent.
.. versionadded:: 1.1
"""
while s:
sent = self.send_stderr(s)
s = s[sent:]
return None
def makefile(self, *params):
"""
Return a file-like object associated with this channel. The optional
``mode`` and ``bufsize`` arguments are interpreted the same way as by
        the built-in ``open()`` function in Python.
:return: `.ChannelFile` object which can be used for Python file I/O.
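        For example (``chan`` assumed to be an open session channel)::
            stdout = chan.makefile("r")
            for line in stdout:
                print(line.rstrip())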
"""
return ChannelFile(*([self] + list(params)))
def makefile_stderr(self, *params):
"""
Return a file-like object associated with this channel's stderr
stream. Only channels using `exec_command` or `invoke_shell`
without a pty will ever have data on the stderr stream.
The optional ``mode`` and ``bufsize`` arguments are interpreted the
        same way as by the built-in ``open()`` function in Python. For a
client, it only makes sense to open this file for reading. For a
server, it only makes sense to open this file for writing.
:returns:
`.ChannelStderrFile` object which can be used for Python file I/O.
.. versionadded:: 1.1
"""
return ChannelStderrFile(*([self] + list(params)))
def makefile_stdin(self, *params):
"""
Return a file-like object associated with this channel's stdin
stream.
The optional ``mode`` and ``bufsize`` arguments are interpreted the
        same way as by the built-in ``open()`` function in Python. For a
client, it only makes sense to open this file for writing. For a
server, it only makes sense to open this file for reading.
:returns:
`.ChannelStdinFile` object which can be used for Python file I/O.
.. versionadded:: 2.6
"""
return ChannelStdinFile(*([self] + list(params)))
def fileno(self):
"""
        Returns an OS-level file descriptor which can be used for polling,
        but not for reading or writing. This is primarily to allow Python's
``select`` module to work.
The first time ``fileno`` is called on a channel, a pipe is created to
simulate real OS-level file descriptor (FD) behavior. Because of this,
two OS-level FDs are created, which will use up FDs faster than normal.
(You won't notice this effect unless you have hundreds of channels
open at the same time.)
:return: an OS-level file descriptor (`int`)
.. warning::
This method causes channel reads to be slightly less efficient.
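        For example, a channel can be multiplexed with ``select`` (``chan``
        illustrative)::
            import select
            readable, _, _ = select.select([chan], [], [], 5.0)
            if readable:
                data = chan.recv(1024)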
"""
self.lock.acquire()
try:
if self._pipe is not None:
return self._pipe.fileno()
# create the pipe and feed in any existing data
self._pipe = pipe.make_pipe()
p1, p2 = pipe.make_or_pipe(self._pipe)
self.in_buffer.set_event(p1)
self.in_stderr_buffer.set_event(p2)
return self._pipe.fileno()
finally:
self.lock.release()
def shutdown(self, how):
"""
Shut down one or both halves of the connection. If ``how`` is 0,
further receives are disallowed. If ``how`` is 1, further sends
are disallowed. If ``how`` is 2, further sends and receives are
disallowed. This closes the stream in one or both directions.
:param int how:
0 (stop receiving), 1 (stop sending), or 2 (stop receiving and
sending).
"""
if (how == 0) or (how == 2):
# feign "read" shutdown
self.eof_received = 1
if (how == 1) or (how == 2):
self.lock.acquire()
try:
m = self._send_eof()
finally:
self.lock.release()
if m is not None and self.transport is not None:
self.transport._send_user_message(m)
def shutdown_read(self):
"""
Shutdown the receiving side of this socket, closing the stream in
the incoming direction. After this call, future reads on this
channel will fail instantly. This is a convenience method, equivalent
to ``shutdown(0)``, for people who don't make it a habit to
memorize unix constants from the 1970s.
.. versionadded:: 1.2
"""
self.shutdown(0)
def shutdown_write(self):
"""
Shutdown the sending side of this socket, closing the stream in
the outgoing direction. After this call, future writes on this
channel will fail instantly. This is a convenience method, equivalent
to ``shutdown(1)``, for people who don't make it a habit to
memorize unix constants from the 1970s.
.. versionadded:: 1.2
"""
self.shutdown(1)
@property
def _closed(self):
# Concession to Python 3's socket API, which has a private ._closed
# attribute instead of a semipublic .closed attribute.
return self.closed
# ...calls from Transport
def _set_transport(self, transport):
self.transport = transport
self.logger = util.get_logger(self.transport.get_log_channel())
def _set_window(self, window_size, max_packet_size):
self.in_window_size = window_size
self.in_max_packet_size = max_packet_size
# threshold of bytes we receive before we bother to send
# a window update
self.in_window_threshold = window_size // 10
self.in_window_sofar = 0
self._log(DEBUG, "Max packet in: {} bytes".format(max_packet_size))
def _set_remote_channel(self, chanid, window_size, max_packet_size):
self.remote_chanid = chanid
self.out_window_size = window_size
self.out_max_packet_size = self.transport._sanitize_packet_size(
max_packet_size
)
self.active = 1
self._log(
DEBUG, "Max packet out: {} bytes".format(self.out_max_packet_size)
)
def _request_success(self, m):
self._log(DEBUG, "Sesch channel {} request ok".format(self.chanid))
self.event_ready = True
self.event.set()
return
def _request_failed(self, m):
self.lock.acquire()
try:
msgs = self._close_internal()
finally:
self.lock.release()
for m in msgs:
if m is not None:
self.transport._send_user_message(m)
def _feed(self, m):
if isinstance(m, bytes):
# passed from _feed_extended
s = m
else:
s = m.get_binary()
self.in_buffer.feed(s)
def _feed_extended(self, m):
code = m.get_int()
s = m.get_binary()
if code != 1:
self._log(
ERROR, "unknown extended_data type {}; discarding".format(code)
)
return
if self.combine_stderr:
self._feed(s)
else:
self.in_stderr_buffer.feed(s)
def _window_adjust(self, m):
nbytes = m.get_int()
self.lock.acquire()
try:
if self.ultra_debug:
self._log(DEBUG, "window up {}".format(nbytes))
self.out_window_size += nbytes
self.out_buffer_cv.notify_all()
finally:
self.lock.release()
def _handle_request(self, m):
key = m.get_text()
want_reply = m.get_boolean()
server = self.transport.server_object
ok = False
if key == "exit-status":
self.exit_status = m.get_int()
self.status_event.set()
ok = True
elif key == "xon-xoff":
# ignore
ok = True
elif key == "pty-req":
term = m.get_string()
width = m.get_int()
height = m.get_int()
pixelwidth = m.get_int()
pixelheight = m.get_int()
modes = m.get_string()
if server is None:
ok = False
else:
ok = server.check_channel_pty_request(
self, term, width, height, pixelwidth, pixelheight, modes
)
elif key == "shell":
if server is None:
ok = False
else:
ok = server.check_channel_shell_request(self)
elif key == "env":
name = m.get_string()
value = m.get_string()
if server is None:
ok = False
else:
ok = server.check_channel_env_request(self, name, value)
elif key == "exec":
cmd = m.get_string()
if server is None:
ok = False
else:
ok = server.check_channel_exec_request(self, cmd)
elif key == "subsystem":
name = m.get_text()
if server is None:
ok = False
else:
ok = server.check_channel_subsystem_request(self, name)
elif key == "window-change":
width = m.get_int()
height = m.get_int()
pixelwidth = m.get_int()
pixelheight = m.get_int()
if server is None:
ok = False
else:
ok = server.check_channel_window_change_request(
self, width, height, pixelwidth, pixelheight
)
elif key == "x11-req":
single_connection = m.get_boolean()
auth_proto = m.get_text()
auth_cookie = m.get_binary()
screen_number = m.get_int()
if server is None:
ok = False
else:
ok = server.check_channel_x11_request(
self,
single_connection,
auth_proto,
auth_cookie,
screen_number,
)
elif key == "[email protected]":
if server is None:
ok = False
else:
ok = server.check_channel_forward_agent_request(self)
else:
self._log(DEBUG, 'Unhandled channel request "{}"'.format(key))
ok = False
if want_reply:
m = Message()
if ok:
m.add_byte(cMSG_CHANNEL_SUCCESS)
else:
m.add_byte(cMSG_CHANNEL_FAILURE)
m.add_int(self.remote_chanid)
self.transport._send_user_message(m)
def _handle_eof(self, m):
self.lock.acquire()
try:
if not self.eof_received:
self.eof_received = True
self.in_buffer.close()
self.in_stderr_buffer.close()
if self._pipe is not None:
self._pipe.set_forever()
finally:
self.lock.release()
self._log(DEBUG, "EOF received ({})".format(self._name))
def _handle_close(self, m):
self.lock.acquire()
try:
msgs = self._close_internal()
self.transport._unlink_channel(self.chanid)
finally:
self.lock.release()
for m in msgs:
if m is not None:
self.transport._send_user_message(m)
# ...internals...
def _send(self, s, m):
size = len(s)
self.lock.acquire()
try:
if self.closed:
# this doesn't seem useful, but it is the documented behavior
# of Socket
raise socket.error("Socket is closed")
size = self._wait_for_send_window(size)
if size == 0:
# eof or similar
return 0
m.add_string(s[:size])
finally:
self.lock.release()
# Note: We release self.lock before calling _send_user_message.
# Otherwise, we can deadlock during re-keying.
self.transport._send_user_message(m)
return size
def _log(self, level, msg, *args):
self.logger.log(level, "[chan " + self._name + "] " + msg, *args)
def _event_pending(self):
self.event.clear()
self.event_ready = False
def _wait_for_event(self):
self.event.wait()
assert self.event.is_set()
if self.event_ready:
return
e = self.transport.get_exception()
if e is None:
e = SSHException("Channel closed.")
raise e
def _set_closed(self):
# you are holding the lock.
self.closed = True
self.in_buffer.close()
self.in_stderr_buffer.close()
self.out_buffer_cv.notify_all()
# Notify any waiters that we are closed
self.event.set()
self.status_event.set()
if self._pipe is not None:
self._pipe.set_forever()
def _send_eof(self):
# you are holding the lock.
if self.eof_sent:
return None
m = Message()
m.add_byte(cMSG_CHANNEL_EOF)
m.add_int(self.remote_chanid)
self.eof_sent = True
self._log(DEBUG, "EOF sent ({})".format(self._name))
return m
def _close_internal(self):
# you are holding the lock.
if not self.active or self.closed:
return None, None
m1 = self._send_eof()
m2 = Message()
m2.add_byte(cMSG_CHANNEL_CLOSE)
m2.add_int(self.remote_chanid)
self._set_closed()
# can't unlink from the Transport yet -- the remote side may still
# try to send meta-data (exit-status, etc)
return m1, m2
def _unlink(self):
# server connection could die before we become active:
# still signal the close!
if self.closed:
return
self.lock.acquire()
try:
self._set_closed()
self.transport._unlink_channel(self.chanid)
finally:
self.lock.release()
def _check_add_window(self, n):
self.lock.acquire()
try:
if self.closed or self.eof_received or not self.active:
return 0
if self.ultra_debug:
self._log(DEBUG, "addwindow {}".format(n))
self.in_window_sofar += n
if self.in_window_sofar <= self.in_window_threshold:
return 0
if self.ultra_debug:
self._log(
DEBUG, "addwindow send {}".format(self.in_window_sofar)
)
out = self.in_window_sofar
self.in_window_sofar = 0
return out
finally:
self.lock.release()
def _wait_for_send_window(self, size):
"""
(You are already holding the lock.)
Wait for the send window to open up, and allocate up to ``size`` bytes
for transmission. If no space opens up before the timeout, a timeout
exception is raised. Returns the number of bytes available to send
(may be less than requested).
"""
# you are already holding the lock
if self.closed or self.eof_sent:
return 0
if self.out_window_size == 0:
# should we block?
if self.timeout == 0.0:
raise socket.timeout()
# loop here in case we get woken up but a different thread has
# filled the buffer
timeout = self.timeout
while self.out_window_size == 0:
if self.closed or self.eof_sent:
return 0
then = time.time()
self.out_buffer_cv.wait(timeout)
if timeout is not None:
timeout -= time.time() - then
if timeout <= 0.0:
raise socket.timeout()
# we have some window to squeeze into
if self.closed or self.eof_sent:
return 0
if self.out_window_size < size:
size = self.out_window_size
if self.out_max_packet_size - 64 < size:
size = self.out_max_packet_size - 64
self.out_window_size -= size
if self.ultra_debug:
self._log(DEBUG, "window down to {}".format(self.out_window_size))
return size
class ChannelFile(BufferedFile):
"""
A file-like wrapper around `.Channel`. A ChannelFile is created by calling
`Channel.makefile`.
.. warning::
To correctly emulate the file object created from a socket's `makefile
<python:socket.socket.makefile>` method, a `.Channel` and its
`.ChannelFile` should be able to be closed or garbage-collected
independently. Currently, closing the `ChannelFile` does nothing but
flush the buffer.
"""
def __init__(self, channel, mode="r", bufsize=-1):
self.channel = channel
BufferedFile.__init__(self)
self._set_mode(mode, bufsize)
def __repr__(self):
"""
Returns a string representation of this object, for debugging.
"""
return "<paramiko.ChannelFile from " + repr(self.channel) + ">"
def _read(self, size):
return self.channel.recv(size)
def _write(self, data):
self.channel.sendall(data)
return len(data)
class ChannelStderrFile(ChannelFile):
"""
A file-like wrapper around `.Channel` stderr.
See `Channel.makefile_stderr` for details.
"""
def _read(self, size):
return self.channel.recv_stderr(size)
def _write(self, data):
self.channel.sendall_stderr(data)
return len(data)
class ChannelStdinFile(ChannelFile):
"""
A file-like wrapper around `.Channel` stdin.
See `Channel.makefile_stdin` for details.
"""
def close(self):
super().close()
self.channel.shutdown_write()
| 49,222 | Python | .py | 1,205 | 31.025726 | 79 | 0.601376 | paramiko/paramiko | 9,017 | 1,999 | 1,074 | LGPL-2.1 | 9/5/2024, 5:07:36 PM (Europe/Amsterdam) |
598 | config.py | paramiko_paramiko/paramiko/config.py | # Copyright (C) 2006-2007 Robey Pointer <[email protected]>
# Copyright (C) 2012 Olle Lundberg <[email protected]>
#
# This file is part of paramiko.
#
# Paramiko is free software; you can redistribute it and/or modify it under the
# terms of the GNU Lesser General Public License as published by the Free
# Software Foundation; either version 2.1 of the License, or (at your option)
# any later version.
#
# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Paramiko; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
"""
Configuration file (aka ``ssh_config``) support.
"""
import fnmatch
import getpass
import os
import re
import shlex
import socket
from hashlib import sha1
from io import StringIO
from functools import partial
invoke, invoke_import_error = None, None
try:
import invoke
except ImportError as e:
invoke_import_error = e
from .ssh_exception import CouldNotCanonicalize, ConfigParseError
SSH_PORT = 22
class SSHConfig:
"""
Representation of config information as stored in the format used by
OpenSSH. Queries can be made via `lookup`. The format is described in
OpenSSH's ``ssh_config`` man page. This class is provided primarily as a
convenience to posix users (since the OpenSSH format is a de-facto
standard on posix) but should work fine on Windows too.
.. versionadded:: 1.6
"""
SETTINGS_REGEX = re.compile(r"(\w+)(?:\s*=\s*|\s+)(.+)")
# TODO: do a full scan of ssh.c & friends to make sure we're fully
# compatible across the board, e.g. OpenSSH 8.1 added %n to ProxyCommand.
TOKENS_BY_CONFIG_KEY = {
"controlpath": ["%C", "%h", "%l", "%L", "%n", "%p", "%r", "%u"],
"hostname": ["%h"],
"identityfile": ["%C", "~", "%d", "%h", "%l", "%u", "%r"],
"proxycommand": ["~", "%h", "%p", "%r"],
"proxyjump": ["%h", "%p", "%r"],
# Doesn't seem worth making this 'special' for now, it will fit well
# enough (no actual match-exec config key to be confused with).
"match-exec": ["%C", "%d", "%h", "%L", "%l", "%n", "%p", "%r", "%u"],
}
def __init__(self):
"""
Create a new OpenSSH config object.
Note: the newer alternate constructors `from_path`, `from_file` and
`from_text` are simpler to use, as they parse on instantiation. For
example, instead of::
config = SSHConfig()
            config.parse(open("some-path.config"))
you could::
config = SSHConfig.from_file(open("some-path.config"))
# Or more directly:
config = SSHConfig.from_path("some-path.config")
# Or if you have arbitrary ssh_config text from some other source:
config = SSHConfig.from_text("Host foo\\n\\tUser bar")
"""
self._config = []
@classmethod
def from_text(cls, text):
"""
Create a new, parsed `SSHConfig` from ``text`` string.
.. versionadded:: 2.7
"""
return cls.from_file(StringIO(text))
@classmethod
def from_path(cls, path):
"""
Create a new, parsed `SSHConfig` from the file found at ``path``.
.. versionadded:: 2.7
"""
with open(path) as flo:
return cls.from_file(flo)
@classmethod
def from_file(cls, flo):
"""
Create a new, parsed `SSHConfig` from file-like object ``flo``.
.. versionadded:: 2.7
"""
obj = cls()
obj.parse(flo)
return obj
def parse(self, file_obj):
"""
Read an OpenSSH config from the given file object.
:param file_obj: a file-like object to read the config file from
"""
# Start out w/ implicit/anonymous global host-like block to hold
# anything not contained by an explicit one.
context = {"host": ["*"], "config": {}}
for line in file_obj:
# Strip any leading or trailing whitespace from the line.
# Refer to https://github.com/paramiko/paramiko/issues/499
line = line.strip()
# Skip blanks, comments
if not line or line.startswith("#"):
continue
# Parse line into key, value
match = re.match(self.SETTINGS_REGEX, line)
if not match:
raise ConfigParseError("Unparsable line {}".format(line))
key = match.group(1).lower()
value = match.group(2)
# Host keyword triggers switch to new block/context
if key in ("host", "match"):
self._config.append(context)
context = {"config": {}}
if key == "host":
# TODO 4.0: make these real objects or at least name this
# "hosts" to acknowledge it's an iterable. (Doing so prior
# to 3.0, despite it being a private API, feels bad -
# surely such an old codebase has folks actually relying on
# these keys.)
context["host"] = self._get_hosts(value)
else:
context["matches"] = self._get_matches(value)
# Special-case for noop ProxyCommands
elif key == "proxycommand" and value.lower() == "none":
# Store 'none' as None - not as a string implying that the
# proxycommand is the literal shell command "none"!
context["config"][key] = None
# All other keywords get stored, directly or via append
else:
if value.startswith('"') and value.endswith('"'):
value = value[1:-1]
# identityfile, localforward, remoteforward keys are special
# cases, since they are allowed to be specified multiple times
# and they should be tried in order of specification.
if key in ["identityfile", "localforward", "remoteforward"]:
if key in context["config"]:
context["config"][key].append(value)
else:
context["config"][key] = [value]
elif key not in context["config"]:
context["config"][key] = value
# Store last 'open' block and we're done
self._config.append(context)
def lookup(self, hostname):
"""
Return a dict (`SSHConfigDict`) of config options for a given hostname.
The host-matching rules of OpenSSH's ``ssh_config`` man page are used:
For each parameter, the first obtained value will be used. The
configuration files contain sections separated by ``Host`` and/or
``Match`` specifications, and that section is only applied for hosts
        which match the given patterns or keywords.
Since the first obtained value for each parameter is used, more host-
specific declarations should be given near the beginning of the file,
and general defaults at the end.
The keys in the returned dict are all normalized to lowercase (look for
``"port"``, not ``"Port"``. The values are processed according to the
rules for substitution variable expansion in ``ssh_config``.
Finally, please see the docs for `SSHConfigDict` for deeper info on
features such as optional type conversion methods, e.g.::
conf = my_config.lookup('myhost')
assert conf['passwordauthentication'] == 'yes'
assert conf.as_bool('passwordauthentication') is True
.. note::
If there is no explicitly configured ``HostName`` value, it will be
set to the being-looked-up hostname, which is as close as we can
get to OpenSSH's behavior around that particular option.
:param str hostname: the hostname to lookup
.. versionchanged:: 2.5
Returns `SSHConfigDict` objects instead of dict literals.
.. versionchanged:: 2.7
Added canonicalization support.
.. versionchanged:: 2.7
Added ``Match`` support.
.. versionchanged:: 3.3
Added ``Match final`` support.
"""
# First pass
options = self._lookup(hostname=hostname)
# Inject HostName if it was not set (this used to be done incidentally
# during tokenization, for some reason).
if "hostname" not in options:
options["hostname"] = hostname
# Handle canonicalization
canon = options.get("canonicalizehostname", None) in ("yes", "always")
maxdots = int(options.get("canonicalizemaxdots", 1))
if canon and hostname.count(".") <= maxdots:
# NOTE: OpenSSH manpage does not explicitly state this, but its
# implementation for CanonicalDomains is 'split on any whitespace'.
domains = options["canonicaldomains"].split()
hostname = self.canonicalize(hostname, options, domains)
# Overwrite HostName again here (this is also what OpenSSH does)
options["hostname"] = hostname
options = self._lookup(
hostname, options, canonical=True, final=True
)
else:
options = self._lookup(
hostname, options, canonical=False, final=True
)
return options
def _lookup(self, hostname, options=None, canonical=False, final=False):
# Init
if options is None:
options = SSHConfigDict()
# Iterate all stanzas, applying any that match, in turn (so that things
# like Match can reference currently understood state)
for context in self._config:
if not (
self._pattern_matches(context.get("host", []), hostname)
or self._does_match(
context.get("matches", []),
hostname,
canonical,
final,
options,
)
):
continue
for key, value in context["config"].items():
if key not in options:
# Create a copy of the original value,
# else it will reference the original list
# in self._config and update that value too
# when the extend() is being called.
options[key] = value[:] if value is not None else value
elif key == "identityfile":
options[key].extend(
x for x in value if x not in options[key]
)
if final:
# Expand variables in resulting values
# (besides 'Match exec' which was already handled above)
options = self._expand_variables(options, hostname)
return options
def canonicalize(self, hostname, options, domains):
"""
Return canonicalized version of ``hostname``.
:param str hostname: Target hostname.
:param options: An `SSHConfigDict` from a previous lookup pass.
:param domains: List of domains (e.g. ``["paramiko.org"]``).
:returns: A canonicalized hostname if one was found, else ``None``.
.. versionadded:: 2.7
"""
found = False
for domain in domains:
candidate = "{}.{}".format(hostname, domain)
family_specific = _addressfamily_host_lookup(candidate, options)
if family_specific is not None:
# TODO: would we want to dig deeper into other results? e.g. to
# find something that satisfies PermittedCNAMEs when that is
# implemented?
found = family_specific[0]
else:
# TODO: what does ssh use here and is there a reason to use
# that instead of gethostbyname?
try:
found = socket.gethostbyname(candidate)
except socket.gaierror:
pass
if found:
# TODO: follow CNAME (implied by found != candidate?) if
# CanonicalizePermittedCNAMEs allows it
return candidate
# If we got here, it means canonicalization failed.
# When CanonicalizeFallbackLocal is undefined or 'yes', we just spit
# back the original hostname.
if options.get("canonicalizefallbacklocal", "yes") == "yes":
return hostname
# And here, we failed AND fallback was set to a non-yes value, so we
# need to get mad.
raise CouldNotCanonicalize(hostname)
def get_hostnames(self):
"""
Return the set of literal hostnames defined in the SSH config (both
explicit hostnames and wildcard entries).
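        For example (note the implicit ``*`` entry contributed by the
        anonymous global section)::
            conf = SSHConfig.from_text("Host foo\\nHost *.example.com")
            conf.get_hostnames()  # -> {'*', 'foo', '*.example.com'}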
"""
hosts = set()
for entry in self._config:
            # Match-only blocks have no "host" key, so use .get() here
            hosts.update(entry.get("host", []))
return hosts
def _pattern_matches(self, patterns, target):
# Convenience auto-splitter if not already a list
if hasattr(patterns, "split"):
patterns = patterns.split(",")
match = False
for pattern in patterns:
# Short-circuit if target matches a negated pattern
if pattern.startswith("!") and fnmatch.fnmatch(
target, pattern[1:]
):
return False
# Flag a match, but continue (in case of later negation) if regular
# match occurs
elif fnmatch.fnmatch(target, pattern):
match = True
return match
def _does_match(
self, match_list, target_hostname, canonical, final, options
):
matched = []
candidates = match_list[:]
local_username = getpass.getuser()
while candidates:
candidate = candidates.pop(0)
passed = None
# Obtain latest host/user value every loop, so later Match may
# reference values assigned within a prior Match.
configured_host = options.get("hostname", None)
configured_user = options.get("user", None)
type_, param = candidate["type"], candidate["param"]
# Canonical is a hard pass/fail based on whether this is a
# canonicalized re-lookup.
if type_ == "canonical":
if self._should_fail(canonical, candidate):
return False
if type_ == "final":
passed = final
# The parse step ensures we only see this by itself or after
# canonical, so it's also an easy hard pass. (No negation here as
# that would be uh, pretty weird?)
elif type_ == "all":
return True
# From here, we are testing various non-hard criteria,
# short-circuiting only on fail
elif type_ == "host":
hostval = configured_host or target_hostname
passed = self._pattern_matches(param, hostval)
elif type_ == "originalhost":
passed = self._pattern_matches(param, target_hostname)
elif type_ == "user":
user = configured_user or local_username
passed = self._pattern_matches(param, user)
elif type_ == "localuser":
passed = self._pattern_matches(param, local_username)
elif type_ == "exec":
exec_cmd = self._tokenize(
options, target_hostname, "match-exec", param
)
# This is the laziest spot in which we can get mad about an
# inability to import Invoke.
if invoke is None:
raise invoke_import_error
# Like OpenSSH, we 'redirect' stdout but let stderr bubble up
passed = invoke.run(exec_cmd, hide="stdout", warn=True).ok
# Tackle any 'passed, but was negated' results from above
if passed is not None and self._should_fail(passed, candidate):
return False
# Made it all the way here? Everything matched!
matched.append(candidate)
# Did anything match? (To be treated as bool, usually.)
return matched
def _should_fail(self, would_pass, candidate):
return would_pass if candidate["negate"] else not would_pass
def _tokenize(self, config, target_hostname, key, value):
"""
Tokenize a string based on current config/hostname data.
:param config: Current config data.
:param target_hostname: Original target connection hostname.
:param key: Config key being tokenized (used to filter token list).
:param value: Config value being tokenized.
:returns: The tokenized version of the input ``value`` string.
"""
allowed_tokens = self._allowed_tokens(key)
# Short-circuit if no tokenization possible
if not allowed_tokens:
return value
# Obtain potentially configured hostname, for use with %h.
# Special-case where we are tokenizing the hostname itself, to avoid
# replacing %h with a %h-bearing value, etc.
configured_hostname = target_hostname
if key != "hostname":
configured_hostname = config.get("hostname", configured_hostname)
# Ditto the rest of the source values
if "port" in config:
port = config["port"]
else:
port = SSH_PORT
user = getpass.getuser()
if "user" in config:
remoteuser = config["user"]
else:
remoteuser = user
local_hostname = socket.gethostname().split(".")[0]
local_fqdn = LazyFqdn(config, local_hostname)
homedir = os.path.expanduser("~")
tohash = local_hostname + target_hostname + repr(port) + remoteuser
# The actual tokens!
replacements = {
# TODO: %%???
"%C": sha1(tohash.encode()).hexdigest(),
"%d": homedir,
"%h": configured_hostname,
# TODO: %i?
"%L": local_hostname,
"%l": local_fqdn,
# also this is pseudo buggy when not in Match exec mode so document
# that. also WHY is that the case?? don't we do all of this late?
"%n": target_hostname,
"%p": port,
"%r": remoteuser,
# TODO: %T? don't believe this is possible however
"%u": user,
"~": homedir,
}
# Do the thing with the stuff
tokenized = value
for find, replace in replacements.items():
if find not in allowed_tokens:
continue
tokenized = tokenized.replace(find, str(replace))
# TODO: log? eg that value -> tokenized
return tokenized
def _allowed_tokens(self, key):
"""
Given config ``key``, return list of token strings to tokenize.
.. note::
This feels like it wants to eventually go away, but is used to
preserve as-strict-as-possible compatibility with OpenSSH, which
for whatever reason only applies some tokens to some config keys.
"""
return self.TOKENS_BY_CONFIG_KEY.get(key, [])
def _expand_variables(self, config, target_hostname):
"""
Return a dict of config options with expanded substitutions
for a given original & current target hostname.
Please refer to :doc:`/api/config` for details.
:param dict config: the currently parsed config
:param str hostname: the hostname whose config is being looked up
"""
for k in config:
if config[k] is None:
continue
tokenizer = partial(self._tokenize, config, target_hostname, k)
if isinstance(config[k], list):
for i, value in enumerate(config[k]):
config[k][i] = tokenizer(value)
else:
config[k] = tokenizer(config[k])
return config
def _get_hosts(self, host):
"""
Return a list of host_names from host value.
"""
try:
return shlex.split(host)
except ValueError:
raise ConfigParseError("Unparsable host {}".format(host))
def _get_matches(self, match):
"""
Parse a specific Match config line into a list-of-dicts for its values.
Performs some parse-time validation as well.
"""
matches = []
tokens = shlex.split(match)
while tokens:
match = {"type": None, "param": None, "negate": False}
type_ = tokens.pop(0)
# Handle per-keyword negation
if type_.startswith("!"):
match["negate"] = True
type_ = type_[1:]
match["type"] = type_
# all/canonical have no params (everything else does)
if type_ in ("all", "canonical", "final"):
matches.append(match)
continue
if not tokens:
raise ConfigParseError(
"Missing parameter to Match '{}' keyword".format(type_)
)
match["param"] = tokens.pop(0)
matches.append(match)
# Perform some (easier to do now than in the middle) validation that is
# better handled here than at lookup time.
keywords = [x["type"] for x in matches]
if "all" in keywords:
allowable = ("all", "canonical")
ok, bad = (
list(filter(lambda x: x in allowable, keywords)),
list(filter(lambda x: x not in allowable, keywords)),
)
err = None
if any(bad):
err = "Match does not allow 'all' mixed with anything but 'canonical'" # noqa
elif "canonical" in ok and ok.index("canonical") > ok.index("all"):
err = "Match does not allow 'all' before 'canonical'"
if err is not None:
raise ConfigParseError(err)
return matches
def _addressfamily_host_lookup(hostname, options):
"""
Try looking up ``hostname`` in an IPv4 or IPv6 specific manner.
    This is an odd duck because it must serve two divergent use cases. It looks
up ``AddressFamily`` in ``options`` and if it is ``inet`` or ``inet6``,
this function uses `socket.getaddrinfo` to perform a family-specific
lookup, returning the result if successful.
In any other situation -- lookup failure, or ``AddressFamily`` being
unspecified or ``any`` -- ``None`` is returned instead and the caller is
expected to do something situation-appropriate like calling
`socket.gethostbyname`.
:param str hostname: Hostname to look up.
:param options: `SSHConfigDict` instance w/ parsed options.
:returns: ``getaddrinfo``-style tuples, or ``None``, depending.
"""
address_family = options.get("addressfamily", "any").lower()
if address_family == "any":
return
try:
family = socket.AF_INET6
if address_family == "inet":
family = socket.AF_INET
return socket.getaddrinfo(
hostname,
None,
family,
socket.SOCK_DGRAM,
socket.IPPROTO_IP,
socket.AI_CANONNAME,
)
except socket.gaierror:
pass
class LazyFqdn:
"""
Returns the host's fqdn on request as string.
"""
def __init__(self, config, host=None):
self.fqdn = None
self.config = config
self.host = host
def __str__(self):
if self.fqdn is None:
#
# If the SSH config contains AddressFamily, use that when
# determining the local host's FQDN. Using socket.getfqdn() from
# the standard library is the most general solution, but can
# result in noticeable delays on some platforms when IPv6 is
# misconfigured or not available, as it calls getaddrinfo with no
# address family specified, so both IPv4 and IPv6 are checked.
#
# Handle specific option
fqdn = None
results = _addressfamily_host_lookup(self.host, self.config)
if results is not None:
for res in results:
af, socktype, proto, canonname, sa = res
if canonname and "." in canonname:
fqdn = canonname
break
# Handle 'any' / unspecified / lookup failure
if fqdn is None:
fqdn = socket.getfqdn()
# Cache
self.fqdn = fqdn
return self.fqdn
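

# Usage sketch (hypothetical): the FQDN lookup happens only when the object
# is rendered as a string, and the result is cached for subsequent renders.
def _demo_lazy_fqdn():
    lazy = LazyFqdn(config={}, host=None)  # no lookup performed yet
    first = str(lazy)  # triggers the (possibly slow) lookup
    second = str(lazy)  # served from the cache
    assert first == second
    return first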


class SSHConfigDict(dict):
    """
    A dictionary wrapper/subclass for per-host configuration structures.

    This class introduces some usage niceties for consumers of `SSHConfig`,
    specifically around the issue of type conversion: normal value access
    yields strings, but there are now methods such as `as_bool` and `as_int`
    that yield cast values instead.

    For example, given the following ``ssh_config`` file snippet::

        Host foo.example.com
            PasswordAuthentication no
            Compression yes
            ServerAliveInterval 60

    the following code highlights how you can access both the raw strings and
    usefully type-cast Python versions (recalling that keys are all normalized
    to lowercase first)::

        my_config = SSHConfig()
        my_config.parse(open(os.path.expanduser('~/.ssh/config')))
        conf = my_config.lookup('foo.example.com')

        assert conf['passwordauthentication'] == 'no'
        assert conf.as_bool('passwordauthentication') is False
        assert conf['compression'] == 'yes'
        assert conf.as_bool('compression') is True
        assert conf['serveraliveinterval'] == '60'
        assert conf.as_int('serveraliveinterval') == 60

    .. versionadded:: 2.5
    """

    def as_bool(self, key):
        """
        Express given key's value as a boolean type.

        Typically, this is used for ``ssh_config``'s pseudo-boolean values
        which are either ``"yes"`` or ``"no"``. In such cases, ``"yes"`` yields
        ``True`` and any other value becomes ``False``.

        .. note::
            If (for whatever reason) the stored value is already boolean in
            nature, it's simply returned.

        .. versionadded:: 2.5
        """
        val = self[key]
        if isinstance(val, bool):
            return val
        return val.lower() == "yes"

    def as_int(self, key):
        """
        Express given key's value as an integer, if possible.

        This method will raise ``ValueError`` or similar if the value is not
        int-appropriate, same as the builtin `int` type.

        .. versionadded:: 2.5
        """
        return int(self[key])
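

# Quick sanity check (hypothetical snippet, not part of paramiko itself):
# since SSHConfigDict subclasses dict, it can be built directly for testing
# the casting helpers.
def _demo_sshconfigdict():
    conf = SSHConfigDict({"compression": "yes", "serveraliveinterval": "60"})
    assert conf.as_bool("compression") is True
    assert conf.as_int("serveraliveinterval") == 60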


# file: paramiko_paramiko/paramiko/kex_group14.py

# Copyright (C) 2013 Torsten Landschoff <[email protected]>
#
# This file is part of paramiko.
#
# Paramiko is free software; you can redistribute it and/or modify it under the
# terms of the GNU Lesser General Public License as published by the Free
# Software Foundation; either version 2.1 of the License, or (at your option)
# any later version.
#
# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Paramiko; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.

"""
Standard SSH key exchange ("kex" if you wanna sound cool). Diffie-Hellman of
2048 bit key halves, using a known "p" prime and "g" generator.
"""

from hashlib import sha1, sha256

from paramiko.kex_group1 import KexGroup1


class KexGroup14(KexGroup1):
    # http://tools.ietf.org/html/rfc3526#section-3
P = 0xFFFFFFFFFFFFFFFFC90FDAA22168C234C4C6628B80DC1CD129024E088A67CC74020BBEA63B139B22514A08798E3404DDEF9519B3CD3A431B302B0A6DF25F14374FE1356D6D51C245E485B576625E7EC6F44C42E9A637ED6B0BFF5CB6F406B7EDEE386BFB5A899FA5AE9F24117C4B1FE649286651ECE45B3DC2007CB8A163BF0598DA48361C55D39A69163FA8FD24CF5F83655D23DCA3AD961C62F356208552BB9ED529077096966D670C354E4ABC9804F1746C08CA18217C32905E462E36CE3BE39E772C180E86039B2783A2EC07A28FB5C55DF06F4C52C9DE2BCBF6955817183995497CEA956AE515D2261898FA051015728E5A8AACAA68FFFFFFFFFFFFFFFF # noqa
    G = 2

    name = "diffie-hellman-group14-sha1"
    hash_algo = sha1


class KexGroup14SHA256(KexGroup14):
name = "diffie-hellman-group14-sha256"
hash_algo = sha256
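

# Usage sketch (hypothetical; assumes an already-connected socket object and
# paramiko's public Transport/SecurityOptions API): pin the key exchange to
# the SHA-256 variant defined above.
def _use_group14_sha256(sock):
    import paramiko

    transport = paramiko.Transport(sock)
    opts = transport.get_security_options()
    opts.kex = ("diffie-hellman-group14-sha256",)
    return transport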