[1] add performance measurement with random/sorted datasets

This commit is contained in:
KislyuninED 2026-05-14 01:05:15 +00:00
parent 90e28edfbe
commit 164bcbb260

View File

@ -0,0 +1,88 @@
import random
import time
import csv
from phonebook import ll_insert, ll_find, ll_delete, ll_list_all
from phonebook import ht_create, ht_insert, ht_find, ht_delete, ht_list_all
from phonebook import bst_insert, bst_find, bst_delete, bst_list_all
def generate_records(n, seed=42):
    """Build *n* deterministic (name, phone) record tuples.

    The RNG is seeded with *seed* so repeated calls with the same
    arguments yield identical phone numbers.
    """
    random.seed(seed)
    randint = random.randint  # hoist the attribute lookup out of the loop
    return [
        (f"User_{idx:05d}", f"{randint(100, 999)}-{randint(1000, 9999)}")
        for idx in range(1, n + 1)
    ]
def prepare_datasets(recs):
    """Return a (shuffled copy, name-sorted copy) pair of *recs*.

    The input list is never mutated; shuffling uses the module-level
    random state, so seed beforehand for reproducibility.
    """
    randomized = recs.copy()
    random.shuffle(randomized)
    by_name = sorted(recs, key=lambda rec: rec[0])
    return randomized, by_name
def measure_structure(create_func, insert_func, find_func, delete_func, records, repeats=5):
    """Time insert / find / delete operations for one data structure.

    Parameters:
        create_func: zero-arg factory returning an empty structure.
        insert_func/find_func/delete_func: (struct, name[, phone]) callables;
            insert and delete return the (possibly new) structure handle.
        records: list of (name, phone) tuples to load.
        repeats: number of independent measurement rounds.

    Returns:
        (insert_times, find_times, delete_times) — three lists of
        *repeats* wall-clock durations in seconds.
    """
    insert_times = []
    find_times = []
    delete_times = []
    existing_names = [name for name, _ in records]
    # Cap sample sizes at the dataset size: random.sample raises
    # ValueError when asked for more items than the population holds,
    # which previously crashed for datasets smaller than 100 records.
    n_search = min(100, len(existing_names))
    n_delete = min(50, len(existing_names))
    # Mix present keys with guaranteed-absent ones so lookups exercise
    # both the hit and the miss path.
    search_names = random.sample(existing_names, n_search) + [f"None_{i}" for i in range(10)]
    random.shuffle(search_names)
    delete_names = random.sample(existing_names, n_delete)
    for _ in range(repeats):
        struct = create_func()
        # insert
        start = time.perf_counter()
        for name, phone in records:
            struct = insert_func(struct, name, phone)
        insert_times.append(time.perf_counter() - start)
        # find
        start = time.perf_counter()
        for name in search_names:
            find_func(struct, name)
        find_times.append(time.perf_counter() - start)
        # delete
        start = time.perf_counter()
        for name in delete_names:
            struct = delete_func(struct, name)
        delete_times.append(time.perf_counter() - start)
    return insert_times, find_times, delete_times
def main():
    """Benchmark all three phonebook structures on random and sorted data.

    Generates N records, measures insert/find/delete timings for the
    linked list, hash table and BST on both a shuffled and a name-sorted
    dataset, and writes one row per (structure, mode, operation, repeat)
    to results.csv.
    """
    N = 1000
    base = generate_records(N)
    shuffled, sorted_recs = prepare_datasets(base)

    # (label, create, insert, find, delete) per structure; the list/BST
    # implementations use None as their empty-structure sentinel.
    structures = [
        ('LinkedList', lambda: None, ll_insert, ll_find, ll_delete),
        ('HashTable', ht_create, ht_insert, ht_find, ht_delete),
        ('BST', lambda: None, bst_insert, bst_find, bst_delete),
    ]

    results = []
    for label, create, insert, find, delete in structures:
        for mode, data in [('random', shuffled), ('sorted', sorted_recs)]:
            ins_t, find_t, del_t = measure_structure(create, insert, find, delete, data)
            # Iterate over the measured length instead of a hard-coded 5,
            # so this stays correct if measure_structure's repeat count
            # is ever changed.
            for i in range(len(ins_t)):
                results.append([label, mode, 'insert', ins_t[i]])
                results.append([label, mode, 'find', find_t[i]])
                results.append([label, mode, 'delete', del_t[i]])

    with open('results.csv', 'w', newline='') as f:
        writer = csv.writer(f)
        writer.writerow(['Structure', 'Mode', 'Operation', 'Time_sec'])
        writer.writerows(results)
    print("Results saved to results.csv")
# Run the benchmark suite only when executed as a script, not on import.
if __name__ == '__main__':
    main()