From 06c87ccb969aa0eac79c3f7764fc66e389cfcc46 Mon Sep 17 00:00:00 2001 From: Aseel Omer Date: Wed, 19 Nov 2025 02:34:24 +0200 Subject: [PATCH 1/3] updating the repo --- 0_domain_study/README.md | 1 - 0_domain_study/guide.md | 15 - 0_domain_study/model_evaluation_metrics.md | 77 ---- .../model_selection_justification.md | 128 ------ .../.DS_Store | Bin .../RATIONALE.md | 0 .../README.md | 0 .../images/evaluation-process.png | Bin .../images/prompt-sequence.png | Bin .../images/test-selection.png | Bin .../source_text.txt | 0 .../test_data.json | 0 .../test_prompts.md | 0 .../README.md | 0 .../models.md | 0 1_datasets/README.md | 1 - 1_datasets/guide.md | 363 ------------------ 2_data_preparation/README.md | 1 - 2_data_preparation/guide.md | 19 - .../Apollo11_rag&distilled.ipynb | 0 .../rag_and_distilled_model}/README.md | 0 .../data}/test_data.json | 0 .../rag_and_distilled_model}/emissions.csv | 0 .../rag_and_distilled_model}/model_answers.md | 0 .../mistral7b}/data/Apllo.pdf | Bin .../mistral7b}/mistral7b responses.pdf | Bin .../mistral7b}/model_rag_carbon.ipynb | 0 .../model_rag_carbon_recursive.ipynb | 0 .../quantized_models/mistral7b}/readme.md | 0 .../slm/google_gemma}/README.md | 0 .../slm/google_gemma}/data/source.txt | 0 .../slm/google_gemma}/emissions.csv | 0 ...e_27c11b81-b7a7-47c9-ab82-9ae317559fe9.csv | 0 ...e_685c527f-27ef-47b7-9955-a38ea34424e8.csv | 0 ...e_7005ea2e-d9a3-40aa-900e-f95b5e5623a2.csv | 0 ...e_f2cce301-9b4f-4e6a-b97d-5179efd64332.csv | 0 .../slm/google_gemma}/gemma.ipynb | 0 .../google_gemma}/google_gemma_responses.pdf | Bin 3_data_exploration/README.md | 1 - 3_data_exploration/guide.md | 28 -- 4_data_analysis/README.md | 1 - 4_data_analysis/guide.md | 23 -- README.md | 200 ---------- collaboration/README.md | 5 - collaboration/communication.md | 53 --- collaboration/constraints.md | 34 -- collaboration/guide/0_repository_setup.md | 43 --- collaboration/guide/1_group_norms.md | 4 - collaboration/guide/2_learning_goals.md | 26 -- 
collaboration/guide/3_constraints.md | 20 - collaboration/guide/4_communication.md | 33 -- collaboration/guide/5_project_board.md | 38 -- collaboration/guide/6_development.md | 60 --- collaboration/guide/7_retrospective.md | 56 --- collaboration/guide/README.md | 214 ----------- .../guide/assets/branching_strategy.svg | 21 - .../assets/claim_branch_review_merge.svg | 21 - collaboration/learning_goals.md | 5 - .../0_cross_cultural_collaboration.md | 0 .../1_problem_identification.md | 0 .../retrospectives/2_data_collection.md | 0 .../retrospectives/3_data_analysis.md | 0 .../retrospectives/4_communicating_results.md | 0 .../retrospectives/5_final_presentation.md | 0 collaboration/retrospectives/README.md | 3 - collaboration/retrospectives/_template.md | 31 -- 66 files changed, 1525 deletions(-) delete mode 100644 0_domain_study/README.md delete mode 100644 0_domain_study/guide.md delete mode 100644 0_domain_study/model_evaluation_metrics.md delete mode 100644 0_domain_study/model_selection_justification.md rename {test_dataset_apollo11 => 0_test_dataset_apollo11}/.DS_Store (100%) rename {test_dataset_apollo11 => 0_test_dataset_apollo11}/RATIONALE.md (100%) rename {test_dataset_apollo11 => 0_test_dataset_apollo11}/README.md (100%) rename {test_dataset_apollo11 => 0_test_dataset_apollo11}/images/evaluation-process.png (100%) rename {test_dataset_apollo11 => 0_test_dataset_apollo11}/images/prompt-sequence.png (100%) rename {test_dataset_apollo11 => 0_test_dataset_apollo11}/images/test-selection.png (100%) rename {test_dataset_apollo11 => 0_test_dataset_apollo11}/source_text.txt (100%) rename {rag_and_distilled_model/data => 0_test_dataset_apollo11}/test_data.json (100%) rename {test_dataset_apollo11 => 0_test_dataset_apollo11}/test_prompts.md (100%) rename {commercial_models => 1_commercial_models}/README.md (100%) rename {commercial_models => 1_commercial_models}/models.md (100%) delete mode 100644 1_datasets/README.md delete mode 100644 1_datasets/guide.md delete 
mode 100644 2_data_preparation/README.md delete mode 100644 2_data_preparation/guide.md rename {rag_and_distilled_model => 2_open_source_models/distilled_models/rag_and_distilled_model}/Apollo11_rag&distilled.ipynb (100%) rename {rag_and_distilled_model => 2_open_source_models/distilled_models/rag_and_distilled_model}/README.md (100%) rename {test_dataset_apollo11 => 2_open_source_models/distilled_models/rag_and_distilled_model/data}/test_data.json (100%) rename {rag_and_distilled_model => 2_open_source_models/distilled_models/rag_and_distilled_model}/emissions.csv (100%) rename {rag_and_distilled_model => 2_open_source_models/distilled_models/rag_and_distilled_model}/model_answers.md (100%) rename {mistral7b => 2_open_source_models/quantized_models/mistral7b}/data/Apllo.pdf (100%) rename {mistral7b => 2_open_source_models/quantized_models/mistral7b}/mistral7b responses.pdf (100%) rename {mistral7b => 2_open_source_models/quantized_models/mistral7b}/model_rag_carbon.ipynb (100%) rename {mistral7b => 2_open_source_models/quantized_models/mistral7b}/model_rag_carbon_recursive.ipynb (100%) rename {mistral7b => 2_open_source_models/quantized_models/mistral7b}/readme.md (100%) rename {google_gemma => 2_open_source_models/slm/google_gemma}/README.md (100%) rename {google_gemma => 2_open_source_models/slm/google_gemma}/data/source.txt (100%) rename {google_gemma => 2_open_source_models/slm/google_gemma}/emissions.csv (100%) rename {google_gemma => 2_open_source_models/slm/google_gemma}/emissions_base_27c11b81-b7a7-47c9-ab82-9ae317559fe9.csv (100%) rename {google_gemma => 2_open_source_models/slm/google_gemma}/emissions_base_685c527f-27ef-47b7-9955-a38ea34424e8.csv (100%) rename {google_gemma => 2_open_source_models/slm/google_gemma}/emissions_base_7005ea2e-d9a3-40aa-900e-f95b5e5623a2.csv (100%) rename {google_gemma => 2_open_source_models/slm/google_gemma}/emissions_base_f2cce301-9b4f-4e6a-b97d-5179efd64332.csv (100%) rename {google_gemma => 
2_open_source_models/slm/google_gemma}/gemma.ipynb (100%) rename {google_gemma => 2_open_source_models/slm/google_gemma}/google_gemma_responses.pdf (100%) delete mode 100644 3_data_exploration/README.md delete mode 100644 3_data_exploration/guide.md delete mode 100644 4_data_analysis/README.md delete mode 100644 4_data_analysis/guide.md delete mode 100644 README.md delete mode 100644 collaboration/README.md delete mode 100644 collaboration/communication.md delete mode 100644 collaboration/constraints.md delete mode 100644 collaboration/guide/0_repository_setup.md delete mode 100644 collaboration/guide/1_group_norms.md delete mode 100644 collaboration/guide/2_learning_goals.md delete mode 100644 collaboration/guide/3_constraints.md delete mode 100644 collaboration/guide/4_communication.md delete mode 100644 collaboration/guide/5_project_board.md delete mode 100644 collaboration/guide/6_development.md delete mode 100644 collaboration/guide/7_retrospective.md delete mode 100644 collaboration/guide/README.md delete mode 100644 collaboration/guide/assets/branching_strategy.svg delete mode 100644 collaboration/guide/assets/claim_branch_review_merge.svg delete mode 100644 collaboration/learning_goals.md delete mode 100644 collaboration/retrospectives/0_cross_cultural_collaboration.md delete mode 100644 collaboration/retrospectives/1_problem_identification.md delete mode 100644 collaboration/retrospectives/2_data_collection.md delete mode 100644 collaboration/retrospectives/3_data_analysis.md delete mode 100644 collaboration/retrospectives/4_communicating_results.md delete mode 100644 collaboration/retrospectives/5_final_presentation.md delete mode 100644 collaboration/retrospectives/README.md delete mode 100644 collaboration/retrospectives/_template.md diff --git a/0_domain_study/README.md b/0_domain_study/README.md deleted file mode 100644 index 308b711..0000000 --- a/0_domain_study/README.md +++ /dev/null @@ -1 +0,0 @@ -# Domain Research diff --git 
a/0_domain_study/guide.md b/0_domain_study/guide.md deleted file mode 100644 index d36e40c..0000000 --- a/0_domain_study/guide.md +++ /dev/null @@ -1,15 +0,0 @@ -# Domain Study: Guide - -To do meaningful research in a domain, you need to learn what others already do -and don't understand in this area. Use this folder to organize your group's -understanding of your research domain including: your own summaries, helpful -PDFs, links you found helpful, ... - -This folder is different from `/notes` because it contains _only_ information -about your research domain. When deciding what goes here, ask yourself this -question: _Would someone need to know this to understand our research?_ - -## README.md - -Use this folder's README to document all the notes and resources in this folder. -Someone shouldn't need to read through _everything_ to find what they need. diff --git a/0_domain_study/model_evaluation_metrics.md b/0_domain_study/model_evaluation_metrics.md deleted file mode 100644 index 83ba2ac..0000000 --- a/0_domain_study/model_evaluation_metrics.md +++ /dev/null @@ -1,77 +0,0 @@ -# Model Testing Metrics - -|Skill Type (Task)|What It Tests|Example Dataset|Metric to Measure Accuracy| -|-----------------|-------------|---------------|--------------------------| -|Reasoning / Logic|Mathematical reasoning|GSM8K|(correct answers / total)| -|Commonsense QA|Everyday reasoning and knowledge|PIQA, BoolQ|Accuracy| -|Summarization|Condensing information|CNN/DailyMail, XSum|ROUGE-L, BERTScore| -|Code Generation|Logical structure|HumanEval-lite, MBPP|Pass@k| - -## Datasets - -### GSM8K(Grade School Math 8K) - -It is a dataset of 8.5K high quality linguistically diverse -grade school math word problems. The dataset was created to support the task -of question answering on basic mathematical problems that require multi-step -reasoning. - -### BoolQ - -It is a question answering dataset for yes/no questions containing -15942 examples. 
These questions are naturally occurring ---they are -generated in unprompted and unconstrained settings. - -### PIQA - -This dataset introduces the task of physical commonsense reasoning and a -corresponding benchmark dataset Physical Interaction: Question Answering -or PIQA. - -### Extreme Summarization (XSum) Dataset - -There are three features: -document: Input news article. -summary: One sentence summary of the article. -id: BBC ID of the article. - -### The CNN / DailyMail Dataset - -It is an English-language dataset containing just over -300k unique news articles as written by journalists at CNN and the Daily Mail. -he current version supports both extractive and abstractive summarization. - -### The HumanEval Dataset - -released by OpenAI includes 164 programming problems -with a function sig- nature, docstring, body, and several unit tests. -They were handwritten to ensure not to be included in the training -set of code generation models. - -### MBPP - -The benchmark consists of around 1,000 crowd-sourced Python programming -problems, designed to be solvable by entry level programmers, covering -programming fundamentals, standard library functionality, and so on. -Each problem consists of a task description,code solution and 3 automated -test cases. - -## Metrics - -### Pass@1 - -The percentage of problems for which the model’s first generated solution -passes all tests. - -### BERTScore - -It measures how similar two pieces of text are in meaning, not just in word -overlap. It uses BERT embeddings (or similar transformer embeddings) to -compare the semantic content of the generated text and the reference text. - -### ROUGE, or Recall-Oriented Understudy for Gisting Evaluation - -It is a set of metrics and a software package used for evaluating automatic -summarization and machine translation software in natural language processing. 
-The metrics compare an automatically produced summary or translation against -a reference or a set of references (human-produced) summary or translation. diff --git a/0_domain_study/model_selection_justification.md b/0_domain_study/model_selection_justification.md deleted file mode 100644 index a87a8b0..0000000 --- a/0_domain_study/model_selection_justification.md +++ /dev/null @@ -1,128 +0,0 @@ -# Comparing Open-Source and Commercial LLMs on Reasoning and Summarization Tasks - -## Summary - -**Model Pairing:** GPT-4 vs. Mistral-3B (4 variants) -**Tasks:** Reasoning + Summarization -**Evaluation:** Accuracy, stepwise logic, summarization quality -**Sample Size:** Start small, scale to 50+ for significance - ---- - -## Goal - -To compare the accuracy and environmental impact of: - -- One commercial LLM (closed-source) -- One open-source LLM in four configurations: - - Original - - Distilled - - RAG-enhanced (Retrieval-Augmented Generation) - - Distilled + RAG - ---- - -## Recommended Commercial Model - -**Model:** GPT-4 (OpenAI) - -**Why:** - -- Industry benchmark for reasoning and summarization -- Strong performance across tasks -- Compatible with G-Eval evaluation -- API access available (paid) - -**Alternative:** Claude 3 Opus (Anthropic): strong in reasoning, -slightly weaker in summarization. - ---- - -## Recommended Open-Source Model - -**Model:** Mistral-3B - -**Why:** - -- Lightweight and energy-efficient — smaller carbon footprint than 7B -- Good performance for its size and architecture -- Easy to distill and integrate with RAG -- Active open-source community on Hugging Face - -**Alternative:** Mistral-7B (legacy, more accurate but heavier) or -LLaMA-3-8B (requires stronger GPUs). - ---- - -## Evaluation Strategy - -### 1. Reasoning Tasks - -- ARC (AI2 Reasoning Challenge / grade-school science questions) -- GSM8K (Math reasoning) -- ProofWriter (Step-by-step inference) -- LogiQA (Logical multiple choice) - -### 2. 
Summarization Tasks - -- News articles -- Academic abstracts -- Narrative texts - ---- - -## Sample Size Recommendations - -| Sampling Level | Purpose / Use Case | Reasoning | Summarization | -|----------------|--------------------|------------|----------------| -| Preliminary | Quick validation and failure detection | 50–100 | 50–100 | -| Reliable | Statistically meaningful trends | 200–500+ | 200–500+ | -| Academic | Comprehensive benchmark-level | 1,000–10,000+ | 1,000–10,000+ | - -**Rationale:** - -- Preliminary: Initial signal of model behavior. -- Reliable: Minimum for academic validity (500+ examples). -- Academic: Derived from MMLU and MATH benchmarks (1,000+ examples). - ---- - -## Academic Justification of Sample Size - -| Ref | Benchmark / Source | Justification | -|-----|--------------------|----------------| -| G1 | MMLU Benchmark | 57 subjects, thousands of Qs → 1,000+ needed | -| G2 | MATH Benchmark | 12,500 math problems → 1,000+ subset valid | -| G3 | ANLI / LLM Eval | 1,200 test examples → supports 200–500+ | -| G4 | ML Sample Size | 500+ gives strong validity in ML research | - ---- - -## Why This Project Is Niche and Valuable - -**Unique because:** - -- Compares *versions* of the same open-source model. -- Evaluates *accuracy + environmental impact* (energy, CO₂). - -**Valuable because:** - -- Helps understand trade-offs between performance and footprint. -- Designed for student teams with limited resources. -- Provides replicable framework for *ethical + technical* evaluation. -- Supports the global shift toward *eco-conscious AI*. - -**References:** - -- [DeepSeek vs GPT-4 vs LLaMA vs Mistral vs Cohere](https://www.aubergine.co/insights/deepseek-v3-vs-gpt-4-vs-llama-3-vs-mistral-7b-vs-cohere) -- [Mistral vs GPT comparison](https://dev.to/abhinowww/mistral-vs-gpt-a-comprehensive-comparison-of-leading-ai-models-2lk2) - ---- - -## Note on Mistral Model Selection - -- Mistral-7B is a *legacy model* (as of March 2025) but still benchmarked. 
-- Mistral-3B offers better efficiency, lower GPU use, smaller footprint. -- Our main open-source model: **Mistral-3B** -- Mistral-7B appears as a baseline reference. -- Mistral-Nemo: Mentioned as a next-generation model for discussion. diff --git a/test_dataset_apollo11/.DS_Store b/0_test_dataset_apollo11/.DS_Store similarity index 100% rename from test_dataset_apollo11/.DS_Store rename to 0_test_dataset_apollo11/.DS_Store diff --git a/test_dataset_apollo11/RATIONALE.md b/0_test_dataset_apollo11/RATIONALE.md similarity index 100% rename from test_dataset_apollo11/RATIONALE.md rename to 0_test_dataset_apollo11/RATIONALE.md diff --git a/test_dataset_apollo11/README.md b/0_test_dataset_apollo11/README.md similarity index 100% rename from test_dataset_apollo11/README.md rename to 0_test_dataset_apollo11/README.md diff --git a/test_dataset_apollo11/images/evaluation-process.png b/0_test_dataset_apollo11/images/evaluation-process.png similarity index 100% rename from test_dataset_apollo11/images/evaluation-process.png rename to 0_test_dataset_apollo11/images/evaluation-process.png diff --git a/test_dataset_apollo11/images/prompt-sequence.png b/0_test_dataset_apollo11/images/prompt-sequence.png similarity index 100% rename from test_dataset_apollo11/images/prompt-sequence.png rename to 0_test_dataset_apollo11/images/prompt-sequence.png diff --git a/test_dataset_apollo11/images/test-selection.png b/0_test_dataset_apollo11/images/test-selection.png similarity index 100% rename from test_dataset_apollo11/images/test-selection.png rename to 0_test_dataset_apollo11/images/test-selection.png diff --git a/test_dataset_apollo11/source_text.txt b/0_test_dataset_apollo11/source_text.txt similarity index 100% rename from test_dataset_apollo11/source_text.txt rename to 0_test_dataset_apollo11/source_text.txt diff --git a/rag_and_distilled_model/data/test_data.json b/0_test_dataset_apollo11/test_data.json similarity index 100% rename from rag_and_distilled_model/data/test_data.json 
rename to 0_test_dataset_apollo11/test_data.json diff --git a/test_dataset_apollo11/test_prompts.md b/0_test_dataset_apollo11/test_prompts.md similarity index 100% rename from test_dataset_apollo11/test_prompts.md rename to 0_test_dataset_apollo11/test_prompts.md diff --git a/commercial_models/README.md b/1_commercial_models/README.md similarity index 100% rename from commercial_models/README.md rename to 1_commercial_models/README.md diff --git a/commercial_models/models.md b/1_commercial_models/models.md similarity index 100% rename from commercial_models/models.md rename to 1_commercial_models/models.md diff --git a/1_datasets/README.md b/1_datasets/README.md deleted file mode 100644 index 157ea3a..0000000 --- a/1_datasets/README.md +++ /dev/null @@ -1 +0,0 @@ -# Datasets diff --git a/1_datasets/guide.md b/1_datasets/guide.md deleted file mode 100644 index 8b17876..0000000 --- a/1_datasets/guide.md +++ /dev/null @@ -1,363 +0,0 @@ -# Datasets: Guide - -Store your local datasets in this folder (`.csv`, `.xlsx`, `.json`, `.sqlite`, -...). You can use the README to document each dataset (where it's from, what -data & types it contains, what you use it for, ...). - -One of the primary goals of this repository is that anyone can clone and -replicate your research. To make this possible **DO NOT modify or overwrite your -raw datasets**! You should keep them _exactly_ as they were when you downloaded -them, you may even want to name them `dataset.raw.ext` (eg. -`daily_temperatures.raw.csv`). - -When cleaning and processing your datasets, you should save the prepared data to -a _new_ file with a descriptive name. This approach will result in many dataset -files, but that's ok! - -## README.md - -Use the README in this folder to document each dataset in the folder. Include -information like: where is the data from? how was it collected? how does it -relate to your problem? ... 
- -## Types of Dataset - -A dataset is "simply" a collection of related measurements or observations. To -create a good model of your problem using data you must understanding what -_kinds_ of data exist, how to understand them, and the best ways to analyze each -one. The kind of data you choose impacts: - -- The tools you use for exploration and analysis -- How we visualize the data -- The statistical methods you can apply -- The type of conclusions you draw -- And how confident you are of your conclusions - -Below is an overview of different kinds of dataset you will encounter: - -1. [Classification by Data Type](#classification-by-data-type) -2. [Classification by Structure](#classification-by-structure) -3. [Classification by Collection Method](#classification-by-collection-method) -4. [Classification by Size and Complexity](#classification-by-size-and-complexity) -5. [Classification by Access Type](#classification-by-access-type) -6. [Classification by Purpose](#classification-by-purpose) -7. [Classification by Format](#classification-by-format) - -## Classification by Data Type - -### Quantitative (Numerical) Data - -Data that represents quantities and can represented as numbers. - -#### Continuous Data - -- **Definition**: Can take any value within a range (including fractions and - decimals) -- **Examples**: Height, weight, temperature, time, distance -- **Analysis**: Mean, median, standard deviation, histograms, scatter plots -- **Real-world example**: Recording daily temperature over a month (72.5°F, - 68.3°F, etc.) - -#### Discrete Data - -- **Definition**: Countable values, typically whole numbers -- **Examples**: Number of children, items sold, count of occurrences -- **Analysis**: Frequency tables, bar charts, mode -- **Real-world example**: Number of customers visiting a store each day (45, 52, - 38, etc.) - -### Qualitative (Categorical) Data - -Data that describes qualities or characteristics of what you want to study. 
- -#### Nominal Data - -- **Definition**: Categories with no inherent order or ranking -- **Examples**: Gender, blood type, country, color, product type -- **Analysis**: Frequency counts, mode, chi-square tests, pie charts -- **Real-world example**: Survey responses for favorite color (red, blue, green, - etc.) - -#### Ordinal Data - -- **Definition**: Categories with a meaningful order or ranking -- **Examples**: Education level, satisfaction ratings (1-5), economic status -- **Analysis**: Median, percentiles, rank correlations, stacked bar charts -- **Real-world example**: Customer satisfaction ratings (very dissatisfied, - dissatisfied, neutral, satisfied, very satisfied) - -### Binary Data - -- **Definition**: Data with only two possible values -- **Examples**: Yes/no questions, pass/fail outcomes, true/false conditions -- **Analysis**: Proportions, odds ratios, logistic regression -- **Real-world example**: Email spam classification (spam/not spam) - -### Time Series Data - -- **Definition**: Sequential data points collected at specific time intervals -- **Examples**: Stock prices, weather data, website traffic -- **Analysis**: Trend analysis, seasonal decomposition, forecasting -- **Real-world example**: Monthly sales figures over several years - -## Classification by Structure - -### Structured Data - -- **Definition**: Organized in a consistent, predefined format -- **Examples**: Relational databases, spreadsheets, CSV files -- **Characteristics**: - - Follows a schema - - Easy to search and analyze - - Typically stored in rows and columns -- **Real-world example**: Customer information in a CRM database - -### Semi-structured Data - -- **Definition**: Has some organizational properties but not rigid schema -- **Examples**: JSON, XML, email, HTML -- **Characteristics**: - - Flexible format - - Contains tags or markers to separate elements - - Self-describing structure -- **Real-world example**: JSON response from a web API - -### Unstructured Data - -- 
**Definition**: No predefined format or organization -- **Examples**: Text documents, images, audio, video, social media posts -- **Characteristics**: - - Difficult to process with traditional tools - - Often requires specialized techniques (NLP, computer vision) - - Comprises ~80-90% of all data generated -- **Real-world example**: Customer reviews or feedback in free text format - -## Classification by Collection Method - -### Primary Data - -- **Definition**: Collected firsthand for a specific purpose -- **Examples**: Surveys, experiments, interviews, direct observations -- **Advantages**: Tailored to research needs, higher control over quality -- **Disadvantages**: Time-consuming, potentially expensive -- **Real-world example**: Market research survey designed specifically for a new - product - -### Secondary Data - -- **Definition**: Data previously collected for other purposes -- **Examples**: Census data, published studies, company records -- **Advantages**: Cost-effective, time-saving, often larger sample sizes -- **Disadvantages**: May not perfectly fit current research needs -- **Real-world example**: Using government census data for demographic analysis - -### [Proxy Data](https://centerforgov.gitbooks.io/benchmarking/content/Proxy.html) - -- **Definition**: Data that is -- **Examples**: Tree rings to proxy historical weather patterns, tax data to - proxy incomes -- **Advantages**: Helos you understand phenomena that are difficult or - impossible to study directly. -- **Disadvantages**: You cannot draw conclusions with the same confidence. -- **Real-world example**: Using the stock market + unemployment rates as a proxy - for the economy.. 
- -### Experimental Data - -- **Definition**: Generated from controlled experiments with manipulated - variables -- **Examples**: A/B tests, clinical trials, laboratory experiments -- **Characteristics**: - - Control and treatment groups - - Controlled conditions - - Designed to establish causality -- **Real-world example**: Testing whether a new website design increases - conversion rates - -### Observational Data - -- **Definition**: Collected through observation without direct intervention -- **Examples**: Traffic patterns, wildlife behavior, market trends -- **Characteristics**: - - Natural setting - - No manipulation of variables - - Good for establishing correlation (not causation) -- **Real-world example**: Observing and recording consumer shopping behaviors in - a store - -## Classification by Size and Complexity - -### Small Data - -- **Definition**: Datasets manageable with traditional tools and methods -- **Characteristics**: - - Can fit in memory of a typical computer - - Processable with standard software (Excel, SPSS) - - Usually under several gigabytes -- **Analysis**: Standard statistical methods, desktop tools -- **Real-world example**: Survey responses from 500 participants - -### Big Data - -- **Definition**: Datasets too large or complex for traditional processing -- **Characterized by the 5 Vs**: - - **Volume**: Extremely large size - - **Velocity**: Generated at high speed - - **Variety**: Various formats and types - - **Veracity**: Uncertainty and reliability concerns - - **Value**: Extracting meaningful insights -- **Analysis**: Specialized tools (Hadoop, Spark), distributed computing -- **Real-world example**: Social media data from millions of users - -### High-dimensional Data - -- **Definition**: Many variables or features per observation -- **Examples**: Genomic data, image data, complex sensors -- **Challenges**: - - Curse of dimensionality - - Feature selection importance - - Visualization difficulties -- **Analysis**: Dimension 
reduction techniques (PCA, t-SNE), specialized - algorithms -- **Real-world example**: Gene expression data with thousands of genes measured - for each sample - -## Classification by Access Type - -### Public Data - -- **Definition**: Freely available to anyone -- **Examples**: Government data portals, open datasets, public research data -- **Characteristics**: - - No access restrictions - - Often licensed for reuse - - May have usage guidelines -- **Real-world example**: World Bank development indicators - -### Private Data - -- **Definition**: Access restricted to authorized users -- **Examples**: Company internal data, personal health records, proprietary - research -- **Characteristics**: - - Security measures required - - Often subject to privacy regulations - - May require anonymization for broader use -- **Real-world example**: Patient medical records in a hospital database - -### Proprietary Data - -- **Definition**: Owned by organizations and often commercially valuable -- **Examples**: Nielsen ratings, credit scores, market research data -- **Characteristics**: - - Commercial value - - Legal protections - - Often licensed for a fee -- **Real-world example**: Credit bureau consumer data - -## Classification by Purpose - -### Transactional Data - -- **Definition**: Records of business or system transactions -- **Examples**: Sales records, banking transactions, server logs -- **Characteristics**: - - High volume - - Time-stamped - - Operation-focused -- **Real-world example**: Point-of-sale data from retail stores - -### Analytical Data - -- **Definition**: Processed and organized for analysis and decision-making -- **Examples**: Data warehouses, OLAP cubes, aggregated reports -- **Characteristics**: - - Often derived from transactional data - - Optimized for querying and analysis - - May include historical perspectives -- **Real-world example**: Quarterly sales performance dashboard - -### Master Data - -- **Definition**: Core business entities that rarely 
change -- **Examples**: Customer database, product catalog, employee records -- **Characteristics**: - - Reference data - - Shared across systems - - Requires governance -- **Real-world example**: Product master list with SKUs, descriptions, and - categories - -### Metadata - -- **Definition**: Data about data -- **Examples**: File creation dates, database schema, data dictionaries -- **Characteristics**: - - Describes structure and context - - Essential for data management - - Facilitates data discovery -- **Real-world example**: Column names and descriptions for a dataset - -## Classification by Format - -### Tabular Data - -- **Definition**: Organized in tables with rows and columns -- **Examples**: CSV, Excel files, database tables -- **Characteristics**: - - Most common format for analysis - - Each row is an observation, each column a variable -- **Real-world example**: Excel spreadsheet of monthly expenses - -### Hierarchical Data - -- **Definition**: Organized in a tree-like structure with parent-child - relationships -- **Examples**: XML, JSON, file systems -- **Characteristics**: - - Nested structure - - Good for representing complex relationships -- **Real-world example**: Organization chart - -### Network Data - -- **Definition**: Represents connections between entities -- **Examples**: Social networks, transportation systems, web links -- **Characteristics**: - - Consists of nodes and edges - - Focus on relationships -- **Real-world example**: LinkedIn connections network - -### Spatial Data - -- **Definition**: Contains geographic or geometric information -- **Examples**: GIS data, maps, satellite imagery -- **Characteristics**: - - Contains coordinates or shape information - - Often requires specialized tools -- **Real-world example**: Census data with geographic coordinates - -### Temporal Data - -- **Definition**: Emphasizes time dimension -- **Examples**: Time series, event logs, historical records -- **Characteristics**: - - Time-stamped - - May 
show patterns over time -- **Real-world example**: Server logs with timestamp for each entry - -## Key Considerations for Beginners - -### Data Quality Assessment - -- **Completeness**: Missing values, coverage -- **Accuracy**: Errors, outliers, validity -- **Consistency**: Internal contradictions, logical issues -- **Timeliness**: How recent is the data? - -### Ethical Considerations - -- **Privacy**: Personal identifiable information (PII) -- **Consent**: Was data collected with proper consent? -- **Bias**: Is the sample representative? -- **Transparency**: Can methods and sources be disclosed? diff --git a/2_data_preparation/README.md b/2_data_preparation/README.md deleted file mode 100644 index 58add43..0000000 --- a/2_data_preparation/README.md +++ /dev/null @@ -1 +0,0 @@ -# Data Preparation diff --git a/2_data_preparation/guide.md b/2_data_preparation/guide.md deleted file mode 100644 index 212602e..0000000 --- a/2_data_preparation/guide.md +++ /dev/null @@ -1,19 +0,0 @@ -# Data Preparation: Guide - -This folder is for any Python scripts or notebooks you use to clean & prepare -your datasets. These files should: - -1. Read in datasets from `0_datasets` -2. Clean, reformat, or otherwise process the datasets for later. -3. Write the processed dataset into `0_datasets` with a helpful file name. - -**DO NOT modify an existing dataset in `0_datasets`! Instead, save your -processed data to a _new_ file.** This is critical to open research: Someone -should be able to clone this repository and run your scripts to replicate your -research. If you modify an original dataset, others cannot replicate your work. - -## README.md - -Use this folder's README to give a quick summary of each script/notebook - which -dataset(s) does it process and how? which datasets does it create and save to -`/1_datasets`?. 
diff --git a/rag_and_distilled_model/Apollo11_rag&distilled.ipynb b/2_open_source_models/distilled_models/rag_and_distilled_model/Apollo11_rag&distilled.ipynb similarity index 100% rename from rag_and_distilled_model/Apollo11_rag&distilled.ipynb rename to 2_open_source_models/distilled_models/rag_and_distilled_model/Apollo11_rag&distilled.ipynb diff --git a/rag_and_distilled_model/README.md b/2_open_source_models/distilled_models/rag_and_distilled_model/README.md similarity index 100% rename from rag_and_distilled_model/README.md rename to 2_open_source_models/distilled_models/rag_and_distilled_model/README.md diff --git a/test_dataset_apollo11/test_data.json b/2_open_source_models/distilled_models/rag_and_distilled_model/data/test_data.json similarity index 100% rename from test_dataset_apollo11/test_data.json rename to 2_open_source_models/distilled_models/rag_and_distilled_model/data/test_data.json diff --git a/rag_and_distilled_model/emissions.csv b/2_open_source_models/distilled_models/rag_and_distilled_model/emissions.csv similarity index 100% rename from rag_and_distilled_model/emissions.csv rename to 2_open_source_models/distilled_models/rag_and_distilled_model/emissions.csv diff --git a/rag_and_distilled_model/model_answers.md b/2_open_source_models/distilled_models/rag_and_distilled_model/model_answers.md similarity index 100% rename from rag_and_distilled_model/model_answers.md rename to 2_open_source_models/distilled_models/rag_and_distilled_model/model_answers.md diff --git a/mistral7b/data/Apllo.pdf b/2_open_source_models/quantized_models/mistral7b/data/Apllo.pdf similarity index 100% rename from mistral7b/data/Apllo.pdf rename to 2_open_source_models/quantized_models/mistral7b/data/Apllo.pdf diff --git a/mistral7b/mistral7b responses.pdf b/2_open_source_models/quantized_models/mistral7b/mistral7b responses.pdf similarity index 100% rename from mistral7b/mistral7b responses.pdf rename to 2_open_source_models/quantized_models/mistral7b/mistral7b 
responses.pdf diff --git a/mistral7b/model_rag_carbon.ipynb b/2_open_source_models/quantized_models/mistral7b/model_rag_carbon.ipynb similarity index 100% rename from mistral7b/model_rag_carbon.ipynb rename to 2_open_source_models/quantized_models/mistral7b/model_rag_carbon.ipynb diff --git a/mistral7b/model_rag_carbon_recursive.ipynb b/2_open_source_models/quantized_models/mistral7b/model_rag_carbon_recursive.ipynb similarity index 100% rename from mistral7b/model_rag_carbon_recursive.ipynb rename to 2_open_source_models/quantized_models/mistral7b/model_rag_carbon_recursive.ipynb diff --git a/mistral7b/readme.md b/2_open_source_models/quantized_models/mistral7b/readme.md similarity index 100% rename from mistral7b/readme.md rename to 2_open_source_models/quantized_models/mistral7b/readme.md diff --git a/google_gemma/README.md b/2_open_source_models/slm/google_gemma/README.md similarity index 100% rename from google_gemma/README.md rename to 2_open_source_models/slm/google_gemma/README.md diff --git a/google_gemma/data/source.txt b/2_open_source_models/slm/google_gemma/data/source.txt similarity index 100% rename from google_gemma/data/source.txt rename to 2_open_source_models/slm/google_gemma/data/source.txt diff --git a/google_gemma/emissions.csv b/2_open_source_models/slm/google_gemma/emissions.csv similarity index 100% rename from google_gemma/emissions.csv rename to 2_open_source_models/slm/google_gemma/emissions.csv diff --git a/google_gemma/emissions_base_27c11b81-b7a7-47c9-ab82-9ae317559fe9.csv b/2_open_source_models/slm/google_gemma/emissions_base_27c11b81-b7a7-47c9-ab82-9ae317559fe9.csv similarity index 100% rename from google_gemma/emissions_base_27c11b81-b7a7-47c9-ab82-9ae317559fe9.csv rename to 2_open_source_models/slm/google_gemma/emissions_base_27c11b81-b7a7-47c9-ab82-9ae317559fe9.csv diff --git a/google_gemma/emissions_base_685c527f-27ef-47b7-9955-a38ea34424e8.csv 
b/2_open_source_models/slm/google_gemma/emissions_base_685c527f-27ef-47b7-9955-a38ea34424e8.csv similarity index 100% rename from google_gemma/emissions_base_685c527f-27ef-47b7-9955-a38ea34424e8.csv rename to 2_open_source_models/slm/google_gemma/emissions_base_685c527f-27ef-47b7-9955-a38ea34424e8.csv diff --git a/google_gemma/emissions_base_7005ea2e-d9a3-40aa-900e-f95b5e5623a2.csv b/2_open_source_models/slm/google_gemma/emissions_base_7005ea2e-d9a3-40aa-900e-f95b5e5623a2.csv similarity index 100% rename from google_gemma/emissions_base_7005ea2e-d9a3-40aa-900e-f95b5e5623a2.csv rename to 2_open_source_models/slm/google_gemma/emissions_base_7005ea2e-d9a3-40aa-900e-f95b5e5623a2.csv diff --git a/google_gemma/emissions_base_f2cce301-9b4f-4e6a-b97d-5179efd64332.csv b/2_open_source_models/slm/google_gemma/emissions_base_f2cce301-9b4f-4e6a-b97d-5179efd64332.csv similarity index 100% rename from google_gemma/emissions_base_f2cce301-9b4f-4e6a-b97d-5179efd64332.csv rename to 2_open_source_models/slm/google_gemma/emissions_base_f2cce301-9b4f-4e6a-b97d-5179efd64332.csv diff --git a/google_gemma/gemma.ipynb b/2_open_source_models/slm/google_gemma/gemma.ipynb similarity index 100% rename from google_gemma/gemma.ipynb rename to 2_open_source_models/slm/google_gemma/gemma.ipynb diff --git a/google_gemma/google_gemma_responses.pdf b/2_open_source_models/slm/google_gemma/google_gemma_responses.pdf similarity index 100% rename from google_gemma/google_gemma_responses.pdf rename to 2_open_source_models/slm/google_gemma/google_gemma_responses.pdf diff --git a/3_data_exploration/README.md b/3_data_exploration/README.md deleted file mode 100644 index 8d180e0..0000000 --- a/3_data_exploration/README.md +++ /dev/null @@ -1 +0,0 @@ -# Data Exploration diff --git a/3_data_exploration/guide.md b/3_data_exploration/guide.md deleted file mode 100644 index 9064ec0..0000000 --- a/3_data_exploration/guide.md +++ /dev/null @@ -1,28 +0,0 @@ -# Data Exploration: Guide - -This folder is for any Python 
scripts or notebooks you use to _explore and -understand_ your datasets. These files should: - -1. Read in prepared datasets from `0_datasets` -2. Explore and understand the dataset without running a deep analysis: - - Generate some visualizations (in a notebook, or in a separate image file - saved to this folder) - - Run some descriptive statistics - (_[beware](https://www.researchgate.net/publication/316652618_Same_Stats_Different_Graphs_Generating_Datasets_with_Varied_Appearance_and_Identical_Statistics_through_Simulated_Annealing) - the - [Datasaurus Dozen](https://www.research.autodesk.com/publications/same-stats-different-graphs/)!_) - - ... let your curiosity guide you, but _avoid_ running any inferential - statistics or using any machine learning at this stage. - -**DO NOT modify an existing dataset in `0_datasets`!** This is critical to open -research: Someone should be able to clone this repository and run your scripts -to replicate your research. If you modify an original dataset, others cannot -replicate your work. - -> [Chapter 4 - Exploratory Data Analysis](https://bookdown.org/rdpeng/artofdatascience/exploratory-data-analysis.html) -> from the Art of Data Science is a good starting reference. - -## README.md - -Use the README in this folder to give a quick summary of each script/notebook - -which dataset(s) it explores, and how. diff --git a/4_data_analysis/README.md b/4_data_analysis/README.md deleted file mode 100644 index b0957fc..0000000 --- a/4_data_analysis/README.md +++ /dev/null @@ -1 +0,0 @@ -# Data Analysis diff --git a/4_data_analysis/guide.md b/4_data_analysis/guide.md deleted file mode 100644 index 1f92e90..0000000 --- a/4_data_analysis/guide.md +++ /dev/null @@ -1,23 +0,0 @@ -# Data Analysis: Guide - -This folder is for any Python scripts or notebooks you use to gain insights from -your data through modeling, inferential statistics, and other analytical -techniques. These files should: - -1. 
Read in prepared datasets from `0_datasets` -2. Learn from your datasets using methods that are appropriate to your research - question, dataset and team's constraints. - -**DO NOT modify an existing dataset in `0_datasets`!** This is critical to open -research: Someone should be able to clone this repository and run your scripts -to replicate your research. If you modify an original dataset, others cannot -replicate your work. - -> [Chapters 5-8](https://bookdown.org/rdpeng/artofdatascience) from the Art of -> Data Science are a good starting reference. - -## README.md - -Use the README in this folder to document your analysis strategy and provide a -quick summary of each script/notebook. You can also explain your research -results in-depth in this folder's README. diff --git a/README.md b/README.md deleted file mode 100644 index a5069d4..0000000 --- a/README.md +++ /dev/null @@ -1,200 +0,0 @@ -# 🌱 ELO2 – GREEN AI - -***Comparing Commercial and Open-Source Language Models for*** -***Sustainable AI*** - -This repository presents the **ELO2 – GREEN AI Project**, developed -within the **MIT Emerging Talent – AI & ML Program (2025)**. The work -investigates the technical performance, sustainability traits, and -human-perceived quality of **open-source language models** -compared to commercial systems. - ---- - -## 🔍 Project Overview - -### Research Question - -**To what extent can open-source LLMs provide competitive output quality -while operating at significantly lower environmental cost?** - -![image](readme_images/trade-off.png) - -### Motivation - -Large commercial LLMs deliver strong performance but demand substantial -compute and energy. This project examines whether **small, accessible, -and environmentally efficient open-source models**—especially when -enhanced with retrieval and refinement pipelines—can offer practical -alternatives for everyday tasks. - ---- - -## 🧪 Methods - -![image](readme_images/project-timeline.png) - -### 1. 
Model Families - -The study evaluates several open-source model groups: - -- **Quantized Model:** Mistral-7B (GGUF) -- **Distilled Model:** LaMini-Flan-T5-248M -- **Small Models:** Qwen, Gemma -- **Enhanced Pipelines (applied to all model families):** - - **RAG (Retrieval-Augmented Generation)** - - **Recursive Editing** - - includes AI-based critique and iterative refinement - -These configurations serve as the optimized open-source setups used in -the comparison against commercial models. - -### 2. Tasks & Dataset - -Evaluation tasks include: - -- summarization -- factual reasoning -- paraphrasing -- short creative writing -- instruction following -- question answering - -A targeted excerpt from the **Apollo-11 mission transcripts** served as -the central reference text for all evaluation tasks. All prompts were constructed -directly from this shared material. Using a single, consistent source ensured -that every model was tested under identical informational conditions, allowing -clear and fair comparison of output quality and relevance. - -### 3. RAG Pipeline - -Retrieval-Augmented Generation (RAG) was applied to multiple model -families. The pipeline includes: - -- document indexing -- dense similarity retrieval -- context injection through prompt augmentation -- answer synthesis using guidance prompts - -RAG improved factual grounding in nearly all models. - -### 4. Recursive Editing Framework - -A lightweight iterative refinement procedure was implemented: - -1. **Draft Generation:** - The primary model produces an initial output. - -2. **AI-Based Critique:** - A secondary SLM evaluates clarity, accuracy, faithfulness and relevance. - -3. **Refinement Step:** - A revision prompt integrates critique and generates an improved text. - -4. **Stopping Condition:** - The cycle ends after a fixed number of iterations or when critique - stabilizes. - -This approach allowed weaker SLMs to yield higher-quality results -without relying on large models. - -### 5. 
Environmental Measurement - -Environmental footprint data was captured with **CodeCarbon**, recording: - -- CPU/GPU energy usage -- Carbon emissions -- PUE-adjusted overhead - -These measurements enabled comparison with published metrics for -commercial LLMs. - -### 6. Human Evaluation (Single-Blind) - -A structured Google Form experiment collected: - -- **source identification** (commercial vs. open-source) -- **quality ratings** on accuracy, faithfulness, relevance, and clarity - (1–5 scale) - -Outputs were randomized and anonymized to avoid bias. This provided a -perception-based counterpart to technical evaluation. - -### 7. Analysing the Results - -.... - -### 8. Publishing an Article - -.... - ---- - -## 📊 Key Findings - -- FINDING1..... -- FINDING2..... -- FINDING3..... -- FINDING4..... - ---- - -## 🔮 Future Work - -- Evaluate additional open-source model families across diverse tasks -- Test optimized pipelines in specialized domains (medical, legal, technical writing) -- Track carbon footprint across full lifecycle (training to deployment) -- Conduct ablation studies isolating RAG vs. recursive editing contributions - ---- - -## 📢 Communication Strategy - -The research findings will be shared through formats designed for different -audiences and purposes: - -### For Researchers - -A comprehensive research article will document the complete experimental design, -statistical analysis, and implications. - -🔗 **[View Aticle](link1)** - -### For Practitioners & Educators - -An executive presentation provides a visual overview of the research question, -methodology, and key findings without requiring deep technical background. - -🔗 **[View Presentation](link2)** - -### For the Community - -A public evaluation study invites participation in assessing AI-generated texts. -This crowdsourced data forms a critical component of the research. 
- -🔗 **[Participate in Study](link3)** - -### For Reproducibility - -All materials (dataset, prompts, model outputs, evaluation scripts, and carbon -tracking logs) are publicly available in this repository. - -🔗 **[Browse Repository](https://github.com/banuozyilmaz2-jpg/ELO2-GREEN-AI)** - ---- - -## 👥 Contributors - -- [Amro Mohamed](https://github.com/Elshikh-Amro) -- [Aseel Omer](https://github.com/AseelOmer) -- [Banu Ozyilmaz](https://github.com/doctorbanu) -- [Caesar Ghazi](https://github.com/CaesarGhazi) -- [Reem Osama](https://github.com/reunicorn1) -- [Safia Gibril Nouman](https://github.com/Safi222) - ---- - -## 🙏 Acknowledgments - -Special thanks to the **MIT Emerging Talent Program** for their guidance and -feedback throughout the project. diff --git a/collaboration/README.md b/collaboration/README.md deleted file mode 100644 index 20889b9..0000000 --- a/collaboration/README.md +++ /dev/null @@ -1,5 +0,0 @@ -# Collaboration - - - - diff --git a/collaboration/communication.md b/collaboration/communication.md deleted file mode 100644 index f9e2d01..0000000 --- a/collaboration/communication.md +++ /dev/null @@ -1,53 +0,0 @@ - - -# Communication - ---- - -## Communication Schedule - -| Day | How | The topic of discussion | -| :----: | :-: | ----------------------: | -| | | | - -## Communication Channels - -How often we will get in touch on each channel, and what we will discuss there: - -- **Issues**: -- **Pull Requests**: -- **Slack/Discord**: -- **Video Calls**: - -## Availability - -### Availability for calling/messaging - -| Day | Mon | Tue | Wed | Thu | Fri | Sat | Sun | -|--------|:---:|:---:|:---:|:---:|:---:|:---:|:---:| -| _name_ | | | | | | | | - -### How many hours everyone has per day - -- name: _5h_ -- name: _6h_ -- name: _5h_ -- name: _4h_ -- name: _3h_ - -## Asking for Help - -There's a fine line between confidently learning from your mistakes and -stubbornly getting nowhere. 
Here is a general guide for when to ask for help -based on how long you’ve been stuck on the same problem: - -1. _0 → 30 min_: Try on your own -2. _30 → 60 min_: Ask your group for help -3. _60+ min_: Tag your coaches in Slack, WhatsApp or GitHub diff --git a/collaboration/constraints.md b/collaboration/constraints.md deleted file mode 100644 index 2407950..0000000 --- a/collaboration/constraints.md +++ /dev/null @@ -1,34 +0,0 @@ - - -# Constraints - -Some boundaries around our project. - -## External - - - -## Internal: Involuntary - - - -## Internal: Voluntary - - diff --git a/collaboration/guide/0_repository_setup.md b/collaboration/guide/0_repository_setup.md deleted file mode 100644 index 444bcc7..0000000 --- a/collaboration/guide/0_repository_setup.md +++ /dev/null @@ -1,43 +0,0 @@ -# Repository Setup - -Before diving into your project make sure your team has all the practical things -in place. This step isn't very hard but everything will go more smoothly if you -take the time to do this correctly at the beginning. - -## Setup and Share a Repository - -As a team you will choose the name for your team and select someone from your -team to be the repo owner. This person will fork this repository and configure -it for collaboration: - -- Public face of your repository - - Change your - [repository description](https://stackoverflow.com/questions/7757751/how-do-you-change-a-repository-description-on-github) - - Add or remove topics from your repository - - Update your main README with your group name and an initial overview of your - project. (You can change this as much as you want.) -- Under settings in your repository select: - - _Issues_ - - _Projects_ - - _Always suggest updating pull request branches_ -- Enable CI Checks - - Under the _Actions_ tab: - - Enable workflows to ensure CI checks run on all pushes and pull requests. 
- _(Note: If workflows are disabled, CI checks won’t trigger until enabled.)_ -- Collaboration Settings - - Require a code review for PRs to `main`/`master` - ([owanateamachree](https://owanateamachree.medium.com/how-to-protect-the-master-branch-on-github-ab85e9b6b03), - [github docs](https://docs.github.com/en/github/collaborating-with-issues-and-pull-requests/approving-a-pull-request-with-required-reviews)) - - You will need to type `master` into the _Branch name pattern_ input box. - (or type `main` if you have changed your default branch) - - Select these settings to protect matching branches: - - _Require approvals_ - - _Dismiss stale pull request approvals when new commits are pushed_ - - _Require approval of the most recent reviewable push_ - - _Require conversation resolution before merging_ - - _Do not allow bypassing the above settings_ - -## README - -Write the [main README](../../README.md) for your repository! You can always -update it as you learn more about code review and collaboration on GitHub. diff --git a/collaboration/guide/1_group_norms.md b/collaboration/guide/1_group_norms.md deleted file mode 100644 index c7bd0e3..0000000 --- a/collaboration/guide/1_group_norms.md +++ /dev/null @@ -1,4 +0,0 @@ -# Group Norms - -Complete the _Group Norms_ assignment from the Code Review workshop and write -your norms summary and list in the [Collaboration README](../README.md) diff --git a/collaboration/guide/2_learning_goals.md b/collaboration/guide/2_learning_goals.md deleted file mode 100644 index dd660b2..0000000 --- a/collaboration/guide/2_learning_goals.md +++ /dev/null @@ -1,26 +0,0 @@ -# Objectives - -The first step to developing a software project is to make sure everyone agrees -on the project objectives. For these exercises, setting objectives means -discussing what everyone wants to learn from these exercises. - -For example, do you want to; - -- Review DSs and algorithms you have already studied. -- Learn new DSs and algorithms. 
-- Practice using new programming language features. -- Practice more complete, readable, and documented solutions. -- Learn to program faster, to be more a more efficient developer. -- Focus on code review, going through several cycles of feedback for each - solution. -- ... or anything else you can think of - -There are no wrong answers, it's ok if everyone has different objectives! Shat's -important is that everyone has the same expectations. - -## `/collaboration/learning_goals.md` - -Write your group's objectives for these exercises in -[/collaboration/learning_goals.md](../learning_goals.md). If different members -have different objectives, you can write this in the `learning_goals.md` file. -You don't need to have one description for everyone! diff --git a/collaboration/guide/3_constraints.md b/collaboration/guide/3_constraints.md deleted file mode 100644 index ad18e64..0000000 --- a/collaboration/guide/3_constraints.md +++ /dev/null @@ -1,20 +0,0 @@ -# Constraints - -After dreaming big, the most helpful thing you can can do to define your project -is to be clear about _what it isn't_. Thinking big is great for brainstorming -but can get in the way of finishing your projects on time. - -An important step to _scoping_ your project is to discuss your group's -**constraints** before you even begin thinking of user stories. Defining your -constraints will help to define a realistic project and commit to it. -Constraints can also help you to build a more creative project! - -It may not sound intuitive, but the fewer restrictions you have on your work the -less likely you are to find a creative solution! The crucial skills you need to -learn is how to place _constructive_ constraints on your project and on your -team. - -## `/collaboration/constraints.md` - -Fill out the template file in [/collaboration/constraints.md](../constraints.md) -as a group, either in a call or asynchronously. 
diff --git a/collaboration/guide/4_communication.md b/collaboration/guide/4_communication.md deleted file mode 100644 index 8cbaacc..0000000 --- a/collaboration/guide/4_communication.md +++ /dev/null @@ -1,33 +0,0 @@ -# Communication Plan - -Decide as a team when you will be able to meet, and how you'll stay in touch -between meetings. The example in this repository gives some ideas for organizing -this file, but it's up to your group to find what works best for you. - -This is one of the most important steps in collaboration! Group projects go by -very quickly, often you'll only have one week to plan and deliver a milestone. -Taking a few extra minutes to plan before starting can save you a day or two of -confusion during the sprint. - -## Meeting Agendas - -Writing a meeting agenda ahead of time is always a good idea. There is no right -way to plan a meeting, your group should search online and experiment to find -what works best for you. - -This repository has a _template issue_ you can use to create a meeting agenda. -Each time your group is planning a meeting you can create a new issue using this -template and write a meeting agenda. You can use comments to discuss -before/after the meeting, you can add meeting minutes in the issue body or -comments, and close the issue when all meeting points have been addressed. - -## `/collaboration/communication.md` - -> Careful! Be sure to protect your privacy when filling out this document. -> Everything you write here will be public, so share only what you are -> comfortable sharing online. You can share the rest in confidence with you -> group by another channel. - -Fill out the template file in -[/collaboration/communication.md](../communication.md) as a group, either in a -call or asynchronously. 
diff --git a/collaboration/guide/5_project_board.md b/collaboration/guide/5_project_board.md deleted file mode 100644 index a0d5694..0000000 --- a/collaboration/guide/5_project_board.md +++ /dev/null @@ -1,38 +0,0 @@ -# Project Board - -Setting up your Project Board should be easy enough, using it well will take a -lot of discipline and attention to detail! - -## Columns - -Finally, create your project board and add these columns: - -1. **TODO**: Unclaimed challenges that no one is working on. -1. **DOING** Challenges someone has assigned to themselves and is actively - working on. - - Each group member can only have _one_ claimed issue in the DOING column at - a time. - - Use the `help wanted` label in this column if you are blocked. -1. **READY FOR REVIEW**: Issues with a linked PR to review. - - After your issue is READY FOR REVIEW you can claim a new challenge from the - TODO column. -1. **UNDER REVIEW**: After assigning yourself to review a challenge in the READY - FOR REVIEW column, move the issue to this column. - - Issues stay in this column until the reviewer has approved the solution and - merged it to `main`. -1. **DONE**: After merging a solution and closing the issue, the reviewer moves - challege issued to this column. - -## Issues - -Create one _**issue**_ for each interesting challenge you think someone in your -group may want to solve. I's always ok to add more issue later, so no need to -spend hours cluttering the project board right away. There's also no maximum! -Your TODO column can be as full as you like. - -Be sure each issue has: - -- A full description of the challenge. This doesn't need to be as thorough as a - docstring, but it should be clear enough that others can start solving it - without asking you for help. -- Links to any helpful references or resources for solving this challenge. 
diff --git a/collaboration/guide/6_development.md b/collaboration/guide/6_development.md deleted file mode 100644 index 655c5b5..0000000 --- a/collaboration/guide/6_development.md +++ /dev/null @@ -1,60 +0,0 @@ -# Development - -Everything is planned, the board is ready, it's time to get working. As you -develop your project, you will keep track of everything on the project board. - -## `/notes` - -This repository comes with an empty [/notes](./notes) folder your group can use -to keep shared notes on everything you learn about Python, Git, GitHub, code -review, collaboration ... or life : ) - -## `help-wanted` Label - -Add this label to your issue when you're blocked. Your team mates will be able -to see this on the project board or by filtering all the project issues. - -## Opening New Issues - -Issues are flexible, you can always create new issues or update existing ones -after the project has started. Up to now you have created issues for planned -tasks, but issues can be used for all sorts of tasks. Especially tasks that you -weren't able to plan for. - -Some common reasons to create new issues: - -- **Bugs**: if you find a _bug_ in the `master`/`main` branch you can open a new - issue, describe the problem, and attach the `bug` label. Someone can claim the - issue, fix the bug on a new branch, and open a PR. -- **Enhancements**: if you have an idea to improve code that _already exists_ in - the `main` branch you can open a new issue, describe the changes you have in - mind, and add the `enhancement` label. If some has time they can claim this - issue, develop the enhancement on a new branch, and send a PR. - -## Claim, Branch, Review, Merge - -This is it, the real deal. Finally you can write some code! - -You and your team mates will each work on one task at a time, asking for help -and tracking your progress with the project board. 
- -You will be responsible for writing your code on a new branch, and your team -mates will be responsible for reviewing and merging your code to main/master. As -a team you should make your best effort to use _branches_ for organizing your -work. If you wrote a good development strategy you will already be half-way -there. - -It will take a little while to get used to this workflow, but once you do -there's no going back: - -### Branching Strategy - -Your group's git branches should look something like this: - -[![branching strategy](./assets/branching_strategy.svg)](https://excalidraw.com/#json=IBDwrOShConw1SJwEqvii,-a8F2kZpRHiR2itGLU7IiA) - -## Individual Workflow - -Your individual workflow should look something like this: - -[![claim, branch, review, merge](./assets/claim_branch_review_merge.svg)](https://excalidraw.com/#json=-kZH0xSi4KCTru2JePZQV,X9da-5Fl_9AdgMdIbW1X5Q) diff --git a/collaboration/guide/7_retrospective.md b/collaboration/guide/7_retrospective.md deleted file mode 100644 index 5924ca3..0000000 --- a/collaboration/guide/7_retrospective.md +++ /dev/null @@ -1,56 +0,0 @@ -# Retrospective - -> “Regardless of what we discover, we understand and truly believe that everyone -> did the best job they could, given what they knew at the time, their skills -> and abilities, the resources available, and the situation at hand.” -> -> - [Norm Kerth](http://www.amazon.com/Project-Retrospectives-Handbook-Reviews-Dorset-ebook/dp/B00DY3KQJU/ref=tmm_kin_swatch_0?_encoding=UTF8&sr=&qid=) - -All done :) - -Your team has either finished the project ahead of time, or the deadline has -arrived and you're still not finished. No problem either way! - -A retrospective is meant for looking back at how the project went and learning -what to do differently next time. An incomplete project with a great -retrospective is still a success. - -## Behaviors, not People - -Focus on what your group can _do_ that will make the next sprint better. 
A huge -list of things to stop doing doesn't help you move forward. - -Retrospectives are all about behaviors that can be changed, not people that need -fixing. Keep your retrospectives _positive_ and _general_. **_You should NEVER -mention people by name!!!_** - -## Strategy vs. Board - -Take some time as a group to compare your Development Strategy to your Project -Board. - -- What parts of your plan went as expected? -- What parts of your plan did not work out? -- Did you need to add things that weren't in your strategy? -- Or remove extra steps? - -## The Four Points - -Your group will summarize your work by providing answers to these three -questions. You can have more than one answer! You can answer with bullet points, -paragraphs, links, images ... whatever is most clear. - -- **Stop Doing**: what mistakes did you make? did you miss a step? could your - communication have been better? be specific! -- **Continue Doing**: what went well that you should continue doing in the next - project? be specific! -- **Start Doing**: any good ideas you'd like to try out in the next project? be - specific! -- **Lessons Learned**: what cool things or general lessons has your team - learned? This can be about anything, not just code! - -## `/collaboration/retrospective.md` - -Fill out the template file in -[/collaboration/retrospective.md](../retrospective.md) as a group, either in a -call or asynchronously. diff --git a/collaboration/guide/README.md b/collaboration/guide/README.md deleted file mode 100644 index 6cd2bc9..0000000 --- a/collaboration/guide/README.md +++ /dev/null @@ -1,214 +0,0 @@ -# Group Study: Guide - -A brief guide to collaboration in this repository. It covers repository -configuration, setting learning goals, basic collaboration documents, and a -branching workflow. - -There is hopefully enough in here to get you started, but we know there are -still gaps. Let us know what's missing so we can fill it in! 
- -______________________________________________________________________ - -## Suggested Study - -Emerging Talent workshops that may be helpful: - -- [Documenting and Testing](https://github.com/MIT-Emerging-Talent/documenting-and-testing) -- [Debugging](https://github.com/MIT-Emerging-Talent/debugging/) -- [TDD with LLMs](https://github.com/MIT-Emerging-Talent/test-driven-development-with-large-language-models) - -### Local Development Without Git - -#### Visual Studio Code (VSCode) - -VSC is a very popular development environment. Take a look through these links -to start learning all you can do with VSC. You don't need to understand -everything right away, there will be more than enough time to practice: - -- [ArjanCodes](https://www.youtube.com/watch?v=fj2tuTIcUys) - _setting up - VSCode_ -- [academind VSC tutorial](https://www.youtube.com/watch?v=VqCgcpAypFQ) -- [shortcuts cheatsheet](https://vscode-shortcuts.com/) -- [The Coding Train](https://www.youtube.com/watch?v=yJw0SyKO9IU) -- VSC Intro from VSC - - [tutorial step-through](https://code.visualstudio.com/docs/introvideos/basics) - - [get started](https://code.visualstudio.com/docs/getstarted/introvideos) -- Are you using Windows? - - [Windows Subsystem for Linux](https://docs.microsoft.com/en-us/windows/wsl/install-win10) - - [WSL with VSCode](https://docs.microsoft.com/en-us/windows/wsl/tutorials/wsl-vscode) - - [nvm command not found](https://dev.to/duhbhavesh/nvm-command-not-found-1ho) - -#### VSCode Debugging - -Some tutorial series covering how to use the Python debugger in VSCode. 
These -tutorials are not in a special order, look around and find the one that works -best for you: - -- [Boris Paskhaver](https://www.youtube.com/playlist?list=PLQzZ4krxwT9Yay3kz8ly4wXiYJHzMtsWi) -- [Ghost Together](https://www.youtube.com/watch?v=oCcTiRGPogQ) -- [Tech with Tim](https://www.youtube.com/watch?v=7qZBwhSlfOo) -- [The examples in this repo](https://denepo.js.org/watch/?url=https://raw.githubusercontent.com/MIT-Emerging-Talent/debugging/main/0_stepping_through/guide.mp4) - -#### VSCode Extensions - -[Guide for installing extensions in VSCode.](https://code.visualstudio.com/learn/get-started/extensions) - -- [Python](https://marketplace.visualstudio.com/items?itemName=ms-python.python) -- [Pylance](https://marketplace.visualstudio.com/items?itemName=ms-python.vscode-pylance) -- [Python Preview](https://marketplace.visualstudio.com/items?itemName=dongli.python-preview) - (basically Python Tutor inside VSCode) -- [Test Explorer](https://marketplace.visualstudio.com/items?itemName=LittleFoxTeam.vscode-python-test-adapter) -- [Python Indent](https://marketplace.visualstudio.com/items?itemName=KevinRose.vsc-python-indent) - -#### Command Line Interface (CLI) - -- [relative vs. 
absolute paths](https://www.youtube.com/watch?v=ephId3mYu9o) -- [The Coding Train](https://www.youtube.com/watch?v=FnkkzgYuXUM&list=PLRqwX-V7Uu6Zu_uqEA6NqhLzKLACwU74X&index=3) -- [Matt's Lectures and Tutorials](https://www.youtube.com/watch?v=mUXVBMhr7Xg) -- [Jesse Showalter](https://www.youtube.com/watch?v=5XgBd6rjuDQ) -- [Enough to be Dangerous](https://www.learnenough.com/command-line-tutorial) -- CLI games: - - [bashcrawl](https://gitlab.com/slackermedia/bashcrawl/) - clone & play - - [Terminus](https://web.mit.edu/mprat/Public/web/Terminus/Web/main.html) - - online - - [iTerm](https://sr6033.github.io/lterm/) - online -- [A huge cheat sheet](https://gist.github.com/LeCoupa/122b12050f5fb267e75f) - -______________________________________________________________________ - -### Local Development With Git - -#### Git - -- Learn to visualize what happens inside of Git with: - - [learngitbranching](https://learngitbranching.js.org/) + - [a Video Guide](https://www.youtube.com/watch?v=dG0ke9vILQM) - - [git-school](https://git-school.github.io/visualizing-git/) - - [ohmygit](https://ohmygit.org/) - a git game -- [Git and GitHub for Poets](https://www.youtube.com/playlist?list=PLRqwX-V7Uu6ZF9C0YMKuns9sLDzK6zoiV) - (also talks about GitHub) -- [merge-a-matic](https://github.com/lpmi-13/merge-a-matic) -- [Git Katas](https://github.com/eficode-academy/git-katas) -- [git-it](https://github.com/jlord/git-it-electron/) -- [Understand how to use Atomic Commits](https://curiousprogrammer.io/blog/how-to-craft-your-changes-into-small-atomic-commits-using-git) -- [rebasic](https://github.com/lpmi-13/rebasic) - -______________________________________________________________________ - -### Local/Remote Development - -#### GitHub - -- [lab.github.com/githubtraining](https://lab.github.com/githubtraining/paths/) - - [first day on github](https://lab.github.com/githubtraining/first-day-on-github) - - [first week on github](https://lab.github.com/githubtraining/first-week-on-github) - - 
[prepare to use github](https://lab.github.com/githubtraining/prepare-to-use-github) -- [Getting Started with GitHub](https://help.github.com/en/github/getting-started-with-github) -- [Creating a GitHub Repository](https://www.youtube.com/watch?v=WfhRyz3Wf4o) -- [Connecting to GitHub with SSH](https://www.youtube.com/watch?v=p4Jhf729jgg) -- [Creating a local repo and push](https://www.youtube.com/watch?v=vbQ2bYHxxEA) -- [GitHub & VSCode](https://www.youtube.com/watch?v=ZDo0Qht5D6w) -- and much more at - [hackyourfuture.github.io/study](https://hackyourfuture.github.io/study) -- lab.github.com: - - [First Day on GitHub](https://lab.github.com/githubtraining/first-day-on-github) - - [First Week on GitHub](https://lab.github.com/githubtraining/first-week-on-github) -- [The Net Ninja](https://www.youtube.com/watch?v=QV0kVNvkMxc&list=PL4cUxeGkcC9goXbgTDQ0n_4TBzOO0ocPR&index=8) - -______________________________________________________________________ - -### Remote Collaboration - -- [Building Software Together](https://buildtogether.tech/) _a student's guide - to being a compassionate programmer_ - The technical parts are more advanced - than what you're learning now, but all the rest is gold. 
- -### Collaborating on GitHub - -- [Adding collaborators to a repository](https://www.youtube.com/watch?v=p49LRx3hYI8) -- [about code reviews](https://help.github.com/en/github/collaborating-with-issues-and-pull-requests/about-pull-request-reviews) -- [requesting a code review](https://help.github.com/en/github/collaborating-with-issues-and-pull-requests/requesting-a-pull-request-review) -- [prevent pushing to `main`](https://stackoverflow.com/a/57685576) -- [Git Workflow for 2](https://github.com/hackyourfuturebelgium/git-workflow-workshop-for-two) -- [Pull Requests](https://www.youtube.com/watch?v=2M16faxEQsg) -- [Git & GitHub for Poets](https://www.youtube.com/watch?v=BCQHnlnPusY&list=PLRqwX-V7Uu6ZF9C0YMKuns9sLDzK6zoiV) -- The Net Ninja: - [11](https://www.youtube.com/watch?v=MnUd31TvBoU&list=PL4cUxeGkcC9goXbgTDQ0n_4TBzOO0ocPR&index=11) -- linking PRs to Issues: - [reference 1](https://help.github.com/en/github/managing-your-work-on-github/linking-a-pull-request-to-an-issue), - [reference 2](https://help.github.com/articles/autolinked-references-and-urls/) -- [closing Issues using keywords](https://help.github.com/en/enterprise/2.16/user/github/managing-your-work-on-github/closing-issues-using-keywords) - -______________________________________________________________________ - -### Open Source Development - -#### What is Open Source Software? - -- [Mozilla](https://www.youtube.com/watch?v=7c0IrsDsNaw) -- [Brian Daigle](https://www.youtube.com/watch?v=1ehpgbb3XD0) codes of conduct - and github walk-through: -- [Explained with Legos](https://www.youtube.com/watch?v=a8fHgx9mE5U) -- [And with recipes](https://www.youtube.com/watch?v=9ShgYrBkTRs) -- [Open Source vs. Closed Source](https://www.youtube.com/watch?v=2q91vTvc7YE) -- [Free/Libre vs. 
Open Source](https://www.youtube.com/watch?v=Ag1AKIl_2GM) - (think "free speech", not "free food") - -To learn more about all things Open, check out the -[Open Knowledge Foundation](https://okfn.org) and -[Open Knowledge Belgium](https://openknowledge.be). - -#### Open Source Licenses - -The license attached to an Open Source project is not just a detail! Check out -these links to learn more about the many licenses available: - -- [opensource.org](https://opensource.org/licenses) -- [choosealicense.com](https://choosealicense.com) -- [techsoup](https://www.techsoup.org/support/articles-and-how-tos/making-sense-of-software-licensing) -- [infoworld](https://www.infoworld.com/article/2839560/sticking-a-license-on-everything.html) -- copyleft: [what is this?](https://www.youtube.com/watch?v=6Xky8HTqaZo), - [copyleft.org](https://copyleft.org) -- :) [ErikMcClure/bad-licenses](https://github.com/ErikMcClure/bad-licenses) - -#### Codes of Conduct - -The Code of Conduct in an Open Source project describes how contributors should -treat each other. Open Source projects are about sharing and welcoming: - -- [Contributor Covenant](https://www.contributor-covenant.org) -- [opensource.guide](https://opensource.guide/code-of-conduct/) -- [opensourcedesign.net](https://opensourcedesign.net/code-of-conduct/) - -#### Contributor Guidelines - -Contributor Guidelines are important to standardize coding practices and -workflows for an Open Source project. 
You could think of it as describing how -the code should be treated: - -- [mozillascience](https://mozillascience.github.io/working-open-workshop/contributing/) -- [docs.github.com](https://docs.github.com/en/github/building-a-strong-community/setting-guidelines-for-repository-contributors) -- Templates - - [briandk](https://gist.github.com/briandk/3d2e8b3ec8daf5a27a62) - - [opensource.com](https://opensource.com/life/16/3/contributor-guidelines-template-and-tips) -- Examples - - [opensource.guide](https://github.com/github/opensource.guide/blob/main/CONTRIBUTING.md) - - [github/docs](https://github.com/github/docs/blob/main/CONTRIBUTING.md) - - [microsoft/vscode](https://github.com/microsoft/vscode/blob/main/CONTRIBUTING.md) - - [atom/atom](https://github.com/atom/atom/blob/master/CONTRIBUTING.md) - - [voxmedia](https://github.com/voxmedia/open-source-contribution-guidelines) - -#### How to Contribute - -- [opensource.guide](https://opensource.guide) -- [freecodecamp](https://github.com/FreeCodeCamp/how-to-contribute-to-open-source) -- [contribution-guide.org](https://www.contribution-guide.org) -- [redhat](https://www.redhat.com/en/resources/open-source-participation-guidelines-overview) -- [better-programming](https://medium.com/better-programming/4-effortless-steps-for-contributing-to-open-source-projects-35000599367b) -- Small contributions matter! - - [Why the GitHub metric monoculture?](https://medium.com/@leskis/why-the-github-metric-monoculture-d179a2f1d130) - - [pybot](https://github.com/lpmi-13/pypobot) - -and finally ... 
- -- [Contribute to this Project!](https://github.com/Syknapse/Contribute-To-This-Project) diff --git a/collaboration/guide/assets/branching_strategy.svg b/collaboration/guide/assets/branching_strategy.svg deleted file mode 100644 index a6d15ec..0000000 --- a/collaboration/guide/assets/branching_strategy.svg +++ /dev/null @@ -1,21 +0,0 @@ - - - eyJ2ZXJzaW9uIjoiMSIsImVuY29kaW5nIjoiYnN0cmluZyIsImNvbXByZXNzZWQiOnRydWUsImVuY29kZWQiOiJ4nO1da1fiStb+3r/C6ffrIVOXXbf55lxyxftd8e1Zrlx1MDAwMFx1MDAwMVwiXHUwMDE4XHUwMDEwgqizzn+fXbSSXGJcdFxyXGZITlx1MDAxZqtXd2sqJEVqP7Wffamd/3xbW/tcdTAwMWW+tL3v/1r77j2X3aZf6bj973/Y409ep+u3XHUwMDAy7GKD37utXqc8OLNcdTAwMWWG7e6//vnP6Fx1MDAxM0659fDzU17Te/CCsIvn/T/+vrb2n8G/sfs0/cBcdTAwMWKcOzhcdTAwMWHdRZDRg0etYHBDqrikUlBcdTAwMGXDM/zuXHUwMDE23in0KthddZtdL+qxh767lZdTVW7t1tktafqnl2HvotCL7lr1m83z8KU5XHUwMDE4ULeFX1wi6uuGnVbDu/YrYd3ee+R42qc6rV6tXHUwMDFleF37xaMv0mq7ZT98sdch0VE3qFxyrlx1MDAxMVx1MDAxZHnG36RhXHUwMDBlp5pcdTAwMDLDb0spkcNe+3mphKMpXHUwMDAzYTgx2siRYW22mq2OXHUwMDFk1v+RQYtcdTAwMDZWcsuNXHUwMDFhji6oROdoqT0jo3P671/WkUZwysSwp+75tXqIXYo4XHUwMDFj762irq43ePrCUGY0gFx1MDAxZXbYu7VcdTAwMGKVgVxi/Dt65Fx1MDAxZPfBK9hPXHUwMDA0vWYz/tyCyttze1x1MDAxN5VIWNjbkT+jr2PP345cdFl0h1674v6UXGKqXGLTQlx0Lrjiw35cdTAwMTS8xujtm61yI0GIuqHbXHQ3/KDiXHUwMDA3tdGPeEElpafpdsPN1sODXHUwMDFm4jBOWn5cdTAwMTCOnjG47nqn0+rXPXfsWeCVU/va9nJcdTAwMTGqbIt+WotEafDL8Od//5F49tg82zac4ejT3+L/v83B1HiWKlxyz1opzVxipdPD+eGw9qL6YfOsXGZhUelC2dtcdTAwMTflbMNZaeNcdTAwMDDwXHUwMDE0OIM0jtRqXGJnXHUwMDAyI1x1MDAwM1tcZqClwzXTiYBcdTAwMTaCO4JxSIA0w6VWXHUwMDAyg+hcdTAwMGJlXHUwMDBi0tGz+oL0e1x1MDAxYptp22JzvCBQU1wiRo9cdTAwMGW1NFx1MDAxM6BRbGKS/itY770+nrb31PFcdTAwMDXZuORyT4dHxZP7XHUwMDE0WNfdcr3X8VZcdTAwMGZsI5UjuNQmXHUwMDEx2FxmjKOAUZB6XHUwMDAxwFx1MDAwZTtu0G27XHUwMDFkREZcdTAwMTK4lVx1MDAxNDJcdNuUaHBcYjck3vtcdTAwMDZuSiU1XHUwMDAwXHUwMDAyouX5XHUwMDBi3dlH98epti0+yTOiO/Sew0R003R0XHUwMDFiQVx1MDAwMPW2YFOju/NaXHUwMDA393D9xnve3y5B/7DHdmQ98+jm3KHSKIJr2Ti6qZaOXHUwMDEwID9cdTAwMDHdlo4lKu5cdTAwMTjTXHUwMDFlQpozzank0dzMXHLpXHUwMDBmXHUwMDFks2FXaj5cdTAwMTd2q60gP
PdfXHUwMDA3y1x1MDAwN/lwNO8++E373CNcdTAwMTI/XHUwMDEwXctcYl0/+Fx1MDAxMZTwXHSWYzJle9ebfi1cdTAwMTjoMK/6UcpDXHUwMDFm7dVhd9hqR71lvFx1MDAxYl7R64w/nVbHr/mB27yYeGerMHeH66/DYvPe9Vx1MDAwNtpcdTAwMTRnXHUwMDBlJqLSazb9djfZNobRg++45CiinOK/U8OSimr73DeN/PXJ/vHpJlx1MDAwNd7f3cs2lzbcOJQollxmSlx1MDAwNmjPXHUwMDEwXHUwMDA0LiyVS1x1MDAwYmTsXHUwMDEySKLCZVxuXHUwMDE3XHJIVLeAQ9OSsWhQq8CmkFTGZfVcdTAwMTfYnFdOXHUwMDE5S5NTXHUwMDFjXHUwMDAz5SDV9DZfT5ZexU1gTnc3Xlx1MDAwZsOrzfNm8aCfdTmVTuTBXHUwMDE5lVOuuFx1MDAwM5pcdTAwMDDTRGROTlx1MDAwNaBuk1xcrFaFrFxcTClOi0SzaYb19LDTL+w91q9d0b/qdFx1MDAxZTd7J6fVbrblVEmYIKdcdTAwMDAqu3JKpaSEXHUwMDFh8vdYTzlPXHUwMDE1VIKPh2pCpufjN7R3s364mV83533++nhkeuv8OOuCylx1MDAxY0DtnyyoknxcXFCzJKaaXG4wXHUwMDA0wfO3XHUwMDEw01R6ylxiM2g3ajaD2Shhv1TcpZt7XHUwMDA3YEzfJ9u7fjv7Ypq+nlwiSFx1MDAxZG5cdTAwMDDFQWROTFx1MDAxNdeUMkN/XHUwMDFmtZ/muYyZzePhRU6NUkpPLaPmnOXzxcZ5uN9/fH3c7txvn5xm3YZcdTAwMDLpMCqFMkk2XHUwMDE0N9xRxlrwS7WhKIqiZiTZsSGUI0Dq8XhcdTAwMDRcdTAwMTCjOIWMxiNmk93YY/2tPZa5hKm2bTjJ0Vx1MDAwNb7F/5/VZSnSYVxyRmmL6ulhfXb4XHUwMDAyft5TV7xQr2ychOboOZd1hiSIXHUwMDAzMlx1MDAxNdZK21AvU3RcdTAwMTH+ynRYXHUwMDEz4XxE7iRvJYKGa1xccleqc5BaxLjZsr2V5brbbHpBzVuNy3LC7Zfrt1x1MDAwNDl6cJhcdTAwMDNAXGZVZoZY4fZdbe+mwangwXGxeEQvb/JnMuPYlMJhP3GZQFx1MDAwYtGMZY6VQsGzZ75IXHUwMDBlhlx0XHUwMDEw0aB+V17ITapcdTAwMDLRkiElXHUwMDAyOb3t8rpdb4b9x7yBRlx1MDAxMFx1MDAxNJ+2n85cdTAwMWGFs2xcdTAwMGKppspcdTAwMDFcdTAwMDU8OZxNXHTXjuZcdTAwMDLFUy8x71xmXHUwMDEwXHSMJipcdTAwMTBcco5SiWkqyFRtXHUwMDFjW2cz80xSNYPo/l1o4fhM21x1MDAxNs3xjKxwLp9cdTAwMDRcdTAwMDXFtDRsXHUwMDA211k71Du3nnvUZ49cdTAwMDdBNYT93eLGa7ZxbVx1MDAxMLjpPlx0PKAy6zszklx1MDAwMc5cdTAwMTFdufKZXHUwMDA1wUuQU6YpXHUwMDE1MFx1MDAwM0eqXj5vVdjV0U6VP5+Lwk3QL1x1MDAxNjMvppNcXGdcdTAwMDIlKKNSynDMSO7Uyj1nK1x1MDAxNlKJ1lx1MDAxY1x1MDAxMJghN//WrcnSy+bL3k7p9aBaKzz7r09+xqVcdTAwMTQmSSnllGZVTCmTVFx1MDAxYmbU77OYpjF5NsFcdTAwMTUkKVPamOmjur2boHeab72E/bOb0kPN3bhxdzJcdTAwMWXVNVo4xJhcdTAwMTQmL1x1MDAxNMpILON8OUSe6pSsNYVcYknOj+FcdTAwMDJpvF6Esv9i8Z/F4kem2bb3XHSekcGnJprLdJVDXHUwMDA1YVxmiJ6ev5+zy7OWd7lxe9ntbolGd//w0n/MOJptRIazlJw3ypl0XGJEXHUwM
DAxm+XA2ZIznrKBXHUwMDA0p8FhXHUwMDFmXHUwMDEz0N9ziVx1MDAwNFx1MDAxM8pkN17zXHUwMDA16fGzXHUwMDEzptq2XFw0yzNcdTAwMDI7NWCT6lx1MDAxMqZgXUwoy9NTye3Gxl6NPz3vuseHVXnx+ljtrvOF4rriduveQoGNWFx1MDAwNoQupKSYI512QHCil1xu7PSATcygXHUwMDE5OoOtXHUwMDFkypFXrJBCSlx1MDAwMobPXHUwMDE1aJ0rYONcdTAwMDatsO511n5cdTAwMDQrXHUwMDBl3Uw1kGmCOFLPp4dJalxuXHUwMDFhx0mRhsnp/Vx1MDAxM8WHXHUwMDE3XHUwMDEzPj25OXLd7dTysKGbZivbapharoP6bLBcdTAwMDc9IVx1MDAwNVxyrTI0/IZcdTAwMWJCloLW+XZxUq5cdTAwMDWjXHUwMDE0bdKMqmFcdTAwMWSX3y81PGhcdTAwMGLexjlpS0mqtSwkYajvYXp6zTYvd1x1MDAxZkvr8qWT81tnXHJ1+1x1MDAxNFx1MDAxYZ1xXFxcdTAwMTPk10xyXHUwMDFiQEoqt8CVI4ySVGQvZ1x1MDAwZlBCOFOrT9WfXHUwMDA1wPOKqU5P1aeKXG79YVx1MDAxOL/0j+fMyUbpWD9eX21e3/nth93ScyPzclxuuCZcdTAwMTCZXFxHgDLCXHUwMDFku/9cdTAwMGayl1xcamymj0L2+NvIaSpJSs8hUFxujCZcdTAwMDSmJ0k1n+dLvftbfXBSW2/mLopeob6bbSG1iS5UXHUwMDBizlx1MDAxMpPQJOOOVHS5rlxuqVx1MDAxYy1o8nZ4+JnbmkCRXGbYXHUwMDA0XHUwMDA3+KJIfyGKlFx1MDAxYp9q24aTvDiKNGmbXHUwMDE4kdJINsM2sdvqxo6Xe9jx6F6zXHUwMDFk6vvjorffyjaspZi0nVFcbpHZoFx1MDAxN1x1MDAxMCVcZl1xiulnUSRJU8WUKSRI1kafWkx3bnLHZ512jpa2XHUwMDAyj5RI7rR0sp9xMZVcdTAwMDShT94yoEfFVHHjoH2uSVx1MDAwNrMsKVx1MDAwZdggR1p5bHbpXGZcdNJcdTAwMGKLWFx1MDAxYc9cdTAwMDSZYWd4N1C73fvOPe+Gh4rmm14+f3GYbVx1MDAxOVVEOCY1Td/yJ4nPYbleJO5cdTAwMTj9wVFcdTAwMTRcdTAwMTEkmVx1MDAxNslB+0IotojCXCJf/OizXFxIo/Ns23CGZ2RHqZWC0t1HSlipMdN7jy73blWT54rH7qPvXr1S/2B7O+PBWVx1MDAwMahUXGKQZDjjoeUn9uO66SRnTI9vudFcdTAwMWG1XGasdMe85Fx1MDAxMqL5WXb4xntue1x1MDAxZN9cdTAwMGVsNVGbSfdfwI6bdD9EOlx1MDAxNVx1MDAwNKnQ0lUzbMT2ucjrvHl+XG6uVOG1IM96+iBzKVDcRq1cdTAwMDelgYxBXHUwMDFlXHUwMDAxXHUwMDEwXHTDwH3GXHUwMDE4OFx1MDAxY1x1MDAwNvqCS1x1MDAxOfNFXHJ2ZiviXHUwMDEwUFx1MDAwNlGK16Axc2HVVXXt5kVcIlx1MDAxOMlsWtSX5lx1MDAxZD97oWV1J1x1MDAxOHxUTUjHVWBLsc2wl33TdSu7J1xijcO8d/b85PeL9DD3V4M5Mahcclx0U4ZcdTAwMWHCWbzMlL2isWX7qCBGXHUwMDFhXCJcdTAwMDXjoyNdqVx1MDAxMchcdTAwMTXH4VP++1x1MDAxOIFcdTAwMTNFd0L9WORSs1x1MDAxNGF42W5cdTAwMWSd+b1cdTAwMDNTJ1CqPjw2lNDXfz3BXHUwMDE1jlx1MDAwMFwiJC5cdTAwMWOC0ljR7IFhqFGmiDSaXHUwMDBihmZYrFx1MDAxY1BcdTAwMDZcdTAwMDRcdTAwMTeosHjTK6/EtPz4Tmyr+GiBXHUwM
DFiu0mHilx1MDAxOZJg7nePqvu9u9ers/1ypU2e1nub1cXmrH1cbq1CZSdcdTAwMTjKpSRMiFiS0GC9RfPI7jDTRitDtMhSyEfht1x1MDAxMWpcdTAwMTGe4S9e9XeK+Fx1MDAxMFx1MDAwNopcdTAwMWIyw1x1MDAxNpKt+o65qFTKrerTpuxcbl5Yf9nI+D4nSrVwXHUwMDAwKH2rbj5ayVxucY3syu530pAtXzpcdTAwMDeFrG8hkdyMKKO5XCI+wFx1MDAxODJcdDFDyVx1MDAxYriBW/fA7ZTO+lvAz1x1MDAxZbzczU3WhdSg2S6QM0GSkNpcbt7IqIUtj5gxIaX2pSdcdTAwMTmoXHS4fMKk01x1MDAwYr9QyVx0zJC0tb310j3eXHUwMDE3fn9TP235OeNTryGyJqG/4ktcdTAwMTSUI1x1MDAxNXJF7DNcXMS84z8rXHUwMDA0gqOktD5j67BaXHUwMDBly58n/qNwZIBcdTAwMDbIXHUwMDAyKP5cdTAwMTdb+q3iP+nvgUGCTdBUncHPfGqK3adcdTAwMTZ4d5Wji8PmXHUwMDExbWy2TNZcdTAwMDD+UVx1MDAwNYGyb3kh5G1XXHUwMDAwkSP2XHUwMDBmIVx1MDAwZVAgjC4xnss4OMloXHUwMDFli/8wyVx1MDAxNcfFaaWKR+n4jsClv1x1MDAxZsLr1Ly1KFx1MDAwZbP2I/Cra3641ne7a91euYxfp9prfm5YaOZBLSBW5NpFJ1x1MDAxMcLptaWpZEA4m8FcdTAwMTd3lD/cXHUwMDExUKmaO9IzW+LqqsJ0xov1S0ZcdTAwMWRNSUrSqtbMQc233LpXRqWzSDXe91x1MDAxZc1FiiBcdTAwMTlnXHUwMDBiQPMylDKbXHUwMDBi5JlQyqNgWaheXHUwMDFln2zbctE8L0w1pzorXHJRUlBcdTAwMDXT4/retJRX5LeNe/5IToP1wHts1rKNa0o1d/TAOZ2Wa4VPe8l1MJh2TDKux7UzamZcdTAwMDOgVupHR8ODx+qhfV5+xtqqXHUwMDEzNFx1MDAxMlx1MDAwNrDcmqhMpCdpUFx01vOmpncySsWrvFC6XHUwMDBl9ltX7aPd+mknv+5mXHUwMDFioLaYkmAmJXlKK2btXHUwMDE3XHUwMDEwkmvCiFx1MDAxNiPjWm3NL9S6XGJcdTAwMTW16i1NM+nYeVx1MDAwNZXqVEFcdTAwMTWSXGJcdTAwMWPK9Cm7freTU4E6f915aDzXio2r41x1MDAxZJL5ynQoI3TYRr2MXHUwMDFhXHUwMDFjyKiYSmW9o6BXnVGwOClNLfnF018yTVx1MDAxNFx1MDAwZYLwXHUwMDE5XurwUDznl7eF+l4uaFx1MDAwNtvrW/lccq+QcVx1MDAxOVx1MDAwNeFcZktcdTAwMDSNL6WGO1RcdC3R/l+ejM5cdTAwMTeGZVx1MDAwMExcYmkyWrz3L2zE/Fx1MDAxNnFYSLVhKOdcdTAwMWO58ixb72Th+KRU4U+5c2jfV7f2yUW3oLJccmzNJ72txTDILEeSXHUwMDAwaEpos+qsoOUrXHUwMDFmatKzXHUwMDA1mDFcdTAwMDYoNTNon902c7dvTnv3XHJ3vVx1MDAxMPT2XHUwMDBle7vNjFx1MDAwYilcdTAwMWHa3KB2UUmlXHRsXHUwMDEykFx1MDAxYWRVL1X7zFx1MDAxMdXiXHUwMDAwXFxcdTAwMDBARnOrv3RPwtmfXHUwMDE01UpFtFx1MDAwMG5cdTAwMDM60yud1t3e02EzV7tsb16e75yYrWNcdTAwMWFcdTAwMWWk4HlcdG8/jz4zXHUwMDEzooVccmS/18RcdTAwMTnjk0I7REouXHUwMDE3UFx1MDAxNGfiu8+ldLQyXHUwMDA2XHUwMDAxobWWKlZeIyp8XHUwMDE1XHL8PVZtg+yai9UqXHUwMDFlqflcXEHpKdxo0ZS+u9Far
Xb3XHUwMDFmP4JSr7ZW9Z8/14OWdu9R55mISUHkPeOJ+PyJnHyhu/XQ6VJz0L99cFx1MDAwZnvV4+OTmEsrWS3/lF5mrSHCQVx1MDAxYSkofJRebvOCuN3ro4hcdTAwMDT8q8bkzso3tY5QYbfeXGKZVPVcdTAwMTSIg8ZcdTAwMGKaXFzcVjxRMcUzXHUwMDBlqVFcXFDP/plcdTAwMWRcdTAwMTfTK//RXHUwMDFkTFPVsfzVWrFU7fhuXHUwMDEwXG7GuUBcdTAwMTiN7Su160F6ZW+NXHUwMDA0XHUwMDA0NJtqu+kokj+Me9wlLvGyc5WKXajiS5VJ28alcVxcXHUwMDEzzqrrZ+ZcdTAwMTAzM4RcdNjPifrWfWO7qDZrxyd3u0ePLaLL49j/qMBcdTAwMDdcdTAwMWLs0WBcdTAwMDJcIrShaFx1MDAxYlxuMpJgxZR2XHUwMDE0cC2RlVv7MZL4YcRcdTAwMDbXXHUwMDBlnsIjI+n7gvjaJFVcdTAwMTmrayFcdTAwMTVcYja2Tc2aXHUwMDA16Vx1MDAxYoCk0Vxmllx1MDAwMWU1XGJ7zlx1MDAwM+V3TdvGc9dcdTAwMWVQNf7xIyh3PLz82mjUaOZA2FxcXHUwMDFh+kNcYmpcdTAwMWW9PcU3XHUwMDE5VeFcdTAwMTO5dHpcbmgql0ZcdTAwMDKJRM2o6VPE3FKdXFy+1vy9vZP75/OHu1x1MDAxMls355lcdTAwMDFPWuU+bd+AinqEWS4r9cclXHQ0dZRgkjKDJFeJmKpZ1JryvrYpcIRhtlYvUYbEXFzmXHUwMDExq7GWvLZpYlxuXHUwMDE3UMrFeFx1MDAxMWg8aF/JtIhtios2mlx1MDAxNT5q85Vcbjp+di5t5m1cdTAwMWKf83HuMFx1MDAxMfmpVvSEcojMUM5QXHUwMDFiT29HP7Wa6m6/XHKdMy+fv7+7MNfH3lxc7/38VOijXHUwMDFkzcBcYrRFXHUwMDE0qsKYITGAPuPI6Yh9uVx1MDAwM1x1MDAxNTz+gq9FI386VvNcdTAwMWXdXHUwMDA2VJJSLaLYz9xcdTAwMDa0Qlx1MDAxYv5cdTAwMTOzRKdQhp9hS8+hk5NcdMFcXJpcdTAwMWHVUzpeXHTj0ohcdTAwMTleybB1e1F0Xy9cdTAwMGU2Lp82nstn136zsXubebzyn1V0XHUwMDE5XHUwMDE1aElcdTAwMTH+MZ2bMmZcdTAwMWOGQEbjXHUwMDAxXHUwMDFmh4IlXHUwMDAylkiHMdDK4Hqs7WtcdTAwMTBcdTAwMTOUNcfRaNBcdTAwMDJwxGj0kTFlzSUjuLQs4t10XHUwMDBi19VMz+kh+811dfrMXHUwMDBmesfmfFHKOj3Jx5I9JVxin35r+/ou2zjaLtxXizey0OuVrzc2ve2sY1x1MDAxZtHtID1SwCVcdTAwMWGf2oxgnyOH4sZuzVwiVFEq0sNY/yv2XHUwMDE5XHUwMDBlRKbkUYyX9EIy/tHPsVx1MDAwMmUtXHUwMDE10cvydicp6279TS9GenKgOjveY8/rhp+ttKdcdTAwMWXOXHUwMDEylTekv5/FXHUwMDA2cIhGpTV9XHUwMDEw+vZo66pz17/f93TxtrbezHvd+2LGXHUwMDAxrK1vz1xihlpbaFx1MDAxNVx1MDAwN+jPz9tUbKU4QVPbmPhqtnAzm+GNXHUwMDE4KKU1UVx1MDAxYZl9gupGiCtcdTAwMDJEK1BSgolZSkM7m2iOVkNcdTAwMTZVt5TxylNfqvu95VJn3rbxOV+Q6maTXG4/XHUwMDBmSnBcdTAwMDJMz9tPxUMlfLx5KVx1MDAxN+BJtFxuObbz3LnPOPSlsFlcdTAwMDFGI7zxRzJShVx1MDAwNpW1dohG2ZVWWS6Rtc+muVxy4CpFVlrvS0lkNJ9XjvPvobm/vS1cdTAwMTDf3Xb7PMRrXHUwMDBl1+DvT77X30jIa
qpcdTAwMGXa929v4Lco81x1MDAwNkv3n9/+/C8rzvjSIn0= - - - - - mainbranchchallengebranchanother challengebranchexperimentbranchmerge experiment if it was successfulexperiment branchoops!bug fixpull main,create branchpull main,create branchpush branch,create pull requestpush branch,create pull request \ No newline at end of file diff --git a/collaboration/guide/assets/claim_branch_review_merge.svg b/collaboration/guide/assets/claim_branch_review_merge.svg deleted file mode 100644 index 77fd84d..0000000 --- a/collaboration/guide/assets/claim_branch_review_merge.svg +++ /dev/null @@ -1,21 +0,0 @@ - - - eyJ2ZXJzaW9uIjoiMSIsImVuY29kaW5nIjoiYnN0cmluZyIsImNvbXByZXNzZWQiOnRydWUsImVuY29kZWQiOiJ4nO1da1fbyLL9nl/hm/t13NPV755vXHUwMDEwwjNAeFx1MDAwNcLNWSxhXHUwMDBi28Ev/OB11vz3WyVcdTAwMDOSbfnFsYg4XHUwMDE5Zk1ILLnVkrp2711dVf3vXHUwMDBmhcLH3kM7/PhX4WN4X1xu6rVyJ7j7+Fx1MDAwN31+XHUwMDFidrq1Vlx1MDAxM1x1MDAwZono391Wv1OKzqz2eu3uX3/+XHUwMDE5f4OVWo3Bt8J62Fxim70unvd/+O9C4d/Rn4nrdMJSL2hW6mH0hehQfClcdTAwMDVi9NO9VjO6rDbeOS+5ezmh1l3Dy/XCMlx1MDAxZb1cburdMD5CXHUwMDFmffy+vfp4tMXb5mHn5vtW03WEenyIr3pVq9ePelx1MDAwZvXBPVx1MDAwNaVqv5PoU7fXaV2Hp7Vyr4rHYeTzl+91W/hcdTAwMDTib3Va/Uq1XHUwMDE5drtD32m1g1Kt90Cfcf7y6eAh/FWIP7nHf1nLmVLO439WSMvNy9Ho+4Yz7o3UXG7M4GekY59a9VaHOva/PPqJu3ZcdTAwMTmUrivYv2Y5PqdcdTAwMTSWVTmIz7l7ul2hPVx1MDAxM17j87ZeW6vUy1x1MDAxOdWwVqn28Fx1MDAxNGeZlF44bUFFf8ZcdTAwMWRcdKM3XHUwMDAyXHUwMDA231x1MDAxOFdS+ZcjdPn2VjlcdTAwMWFcdTAwMWP/it9DJ2iEW/SVZr9eTz7KZvnpUVx1MDAwZVx1MDAxZLikXHUwMDAzn0fHWXKsXHKNt6DTad293GJieFxc3Tb4Ne+LtS/rXzZ39j9cdTAwMWbb7Wbx48t5f//xynbPVrdMu3jxWL+4hEPfcpWLk4eLJbR7/92vQO+4s7FduthpXtavXHUwMDBlNvtcdTAwMDez21x1MDAxZHxZrKi1L+ZTuOqKm99lt19b3SlWh68y3IG43ae/xS+s3y5cdTAwMDdcdTAwMDOrXHUwMDAzy+nde+91YqDWa83r0ZdWb5WuY0P9kOjwXHUwMDE4QPTC+15cdTAwMWE2gIRJ2Fx1MDAwMFx1MDAwMiTnzlx1MDAwYjU3OMhd2Oiv2N3PlduNxsOXXvE+UGH+wUEzyVx1MDAxNZdGaSelVCPg4JiLkSEzcFBcdTAwMDYxSFo9XHUwMDBlXG6J5/+MXHUwMDAyzlpjXHUwMDA1nv9rUGCa6Sxt9Fx1MDAxYtCwwOiP77nV7Fx1MDAxZNVcdTAwMWXpuoJcdTAwMGZ9ulx1MDAxZTRqdXqn8WOLzFx1MDAwMjtYqlx1MDAwN7XGn6
VOiL0oXHUwMDA0zUKt2+0nhiadtlKvVZrRufhwws6QMfVqOGm/nNColcvJmbiE11x1MDAwZWrNsDP+JlqdWqXWXGbqx/P1XHUwMDAzXHUwMDFmQ7j5PC6AicRQ64Z0MLrpV1x1MDAwMYFITCkjQGCNQ5rAXHUwMDEzT3NcdTAwMTZcdTAwMGWcu5/3K/Zb/dzttDo7/fPtzk3lKvc44Fx1MDAxZDNcdTAwMTJvk0BgXGZcdTAwMDfwY2a9NICzN2SHXHUwMDAzoOPX8IJcdTAwMDBcdTAwMTB/9oxcdTAwMDCWgzZKJlx1MDAxOEK+eED61LhcdTAwMTBcdTAwMDJcdTAwMThntUhcdTAwMGX9VyBcdTAwMDCYoU+nIECxXHUwMDEwdLtowIVcdTAwMDek5GhMV1x1MDAxM6y/XHUwMDFlXvWm2H6v1X6V4U+5/IjRI/jrxNB6sXvQU+1+qkKQbqLx44D0Rkth4yc5y/pcdTAwMWLdsGKPr07W2/v6oi/WXHUwMDFme3efy7m3flx1MDAxMFxmyVx1MDAxMDiUXHREXHUwMDAzhq1fasJcdTAwMDYpXHUwMDA10lx1MDAwNGGlXHUwMDFi7ddyjF9cIlx1MDAwMqVzXHUwMDAwYiGeXHUwMDBiz8FG4iBcdTAwMWW/L5RcdTAwMDCkxfeUVzzISlx1MDAxN2w1zvnPQ9/44u/31u9Vt1x1MDAxMVx1MDAxY16HS2j3+1qd35xurJrj4s/zg7Xjzdp+e31cdO1Wt3c73ZPHi7VKN9g8KVUu2vfhUba4+Do6oNVERPBcboR0izhccjY3e42DTzurn8tXXHUwMDBmXHUwMDE3ZWE6e+0rmXtEMFxubX5cIlwiOGA4XHQrXHUwMDE04847x31GuiDB8acoXHUwMDAyIZySmnOdtflnPWHPT9nXwtuw3mpcdTAwMTe+44RZOFx1MDAwZbrXhS8tnInrXHUwMDBmbztxz9GNXGZZu0qM4lx1MDAxMTPFycApXHUwMDFjmXNcdTAwMWLpZWu7fWHPP1+u1fSeO/puu9d7pdxcdTAwMWKplMxKgzc7UO/DRqqEZ5p7p1x1MDAxZFx1MDAxMnuvk3azzGlbx7b/YqNcdL70ZKNOaOO1ydxEXzkznXy/PN78tFH93uy1+ydcdTAwMGb1i/Lxxvd8M/ZG6zYs9KrhQCZcdTAwMTd6rcJaq9as/GhcdTAwMTZcdTAwMGJtvGqhXHUwMDExdFGp/9lAU6aDRK1cdTAwMGKlVqPdx0/ppGedXWiGd4XLTtAsVVx1MDAwYne1Xlx1MDAxNT+ptFrlQlx1MDAxM99E4jwkXHUwMDE3eCuFu7BeL9KhMjXWqOFTf2OpkL/7nlOjWDFcdTAwMTXlhsdmXHUwMDEy5tRkL6VWXHUwMDFhxYmT8+uTdC6ac6BD6s+s5sZpXHUwMDE0/tzHjUTOXHTNmeBgtLJKXHUwMDFi51x1MDAxM09/qc5cdCbwUXPjlUfMXHUwMDAzXHUwMDA01Vx1MDAxNKXCOXNcdTAwMTZQKlx1MDAxOeQkPInIz1JFIFx1MDAxNnNcdTAwMDVLWMKIXHUwMDEx7nlcZomnT/6Ob/BNKEy3XHUwMDE3dHqrtWZcdTAwMTntcLhjT4t3W3OsntGjXGLadFx1MDAxN8xcdTAwMWGD5NJcdTAwMTllJcpNkzjjqlXq031cdTAwMTQ549xZXHUwMDAxwmrvuPNeazt292GzPLtX01x1MDAwNftLryzDkSdpssV+aa7EhF5Jq8BIL1xmdlxunFx1MDAwMDXWqzpcdTAwMDLUp1xiQ/Dxf0Xs6o0+5uh5rlx1MDAxMFx1MDAxZVTDYGxcZuBdJY+NXHUwMDAyR5taXHUwMDFjnlx1MDAwYuO/XHUwMDE1YsOK/vHy93/9kXr25EFcdTAwMWZcdTAwMWRcdTAwMWRcdTAwMWLucXtcdTAwMWaSv1/ll0H+Nlx1MDAxMfekXHUwMD
AxXHUwMDEwfJHVmU/nZ1x1MDAwZvLKP8rLjZXtjYvvm6eNnU7ecVx1MDAwZmVcctPeelx1MDAwN9ZGnlk7hHxcYnmMaJVAVepRjNmRji1cdTAwMDf4lORokWCH5rVcdTAwMTjyrGDaKiMtXHUwMDBlXHUwMDE0i2RzXHUwMDE08ZRcdTAwMDSEPGHizuWL+mXlRPlcdTAwMDVO4Km2NklFSTXRzFx1MDAxNNeeXFxcdTAwMDDzW5m52avAl/O17m1p/2G3uv1weHZWybuVaTBMXG6vxMDGRmSUsZpxwSWCvqblkWxklFApMipl8dOAXHUwMDAy8MZmbk358XVcdTAwMWOGN/2w20N+/qlVXHUwMDBlXHUwMDBih+FtLbyboDpmLk++VnfM6ESGXlx1MDAwZWsmXHUwMDA2MIGgmVx1MDAwMZF3bvvs7z2u3Nx3zuxuq9/tu+217VXXzLt9Klx1MDAwM8xxiWA08EWOzIJcdTAwMTbNXHUwMDE3qb9SXHUwMDA2jUNnZaDSp4Qs+YTUeDZQZK1OQm49XHUwMDFkVp43Pz3qa3/5c3N17St83t856+Tb09Hud6tcdTAwMDNcdTAwMWT/JNdR1pNcdTAwMDMgerCFTthuXHLp+q+HhatOqzH0XHLyXHUwMDA2/DnwXGbQuZ1cdTAwMTdr7kSGnPhCqVWvh0FcdTAwMDVcdTAwMGbTiXRzg8+x1WdvQuR4oKORK1wi/ohOOERS/lC4anWm41Rm3pH/jmc1p0fFw1REnehRsYnpczQm1HLrvJrfcZxOXHUwMDFmc46oTlx1MDAwMePI7pTn4LROLGbR97WwzEtjXHUwMDA1XG4+4WxGslx1MDAwMjVcdTAwMWPzqFxcXHUwMDA0Pm/rlE/TXHUwMDE2gvzbWiNcdTAwMDdcdTAwMDWC/jFtgUpbXG7OPSxcdTAwMDFt3507ZabjXCLplsDuXHUwMDE46clcdTAwMWaBMt7HXHUwMDFlxcKzf1x1MDAwM8Zudi7vyXRZXYh9OviShcNJ1GpcdTAwMGKgkn6EZD+Bg0a9aVx1MDAwNb5s8v64sV69J+9JceJcdTAwMTinn7HRXHUwMDFkN/ch+ftVzlx1MDAxM1x1MDAxM7vNRiFOolZ3elx1MDAwMdfJdvVordu6O17pb35cdTAwMTZn9dLPo5PuTt4hjla/pFx1MDAxMS8xbcOc0Vx1MDAwYsc0UjVuhCZcYpMj/VqS58RKJkG657C6XHUwMDE0XHUwMDA2XHTaModooVx1MDAwNE5yyns/XHUwMDE29e49XHUwMDFl1Fxc5DW4RTZcdTAwMWW3d697pZWTeq9+4D6Jx9XHkyX4T35cdTAwMDFRnWpuXHUwMDEz/Sd6SviYxKOgXHUwMDE3WIe+Lq73d068+3bha837x+vaTlDJffiYloIppFx1MDAxMdZBZE5i2NaUY+bJfzlsXHUwMDA1v8CBgrZEXHUwMDBlSZd5XG5Jfvwnn1rNcr80n/8kI10yo1x1MDAwYlx1MDAxOXpPXHUwMDFjn+jdXHUwMDE03HNcdTAwMGJcbtT8od07qztb6nTz8UK3yse9iilVjFvLu3UqY1BBxf7N4fwv7zXTSLeQodBkmM08XGIpa6XjMVwiaJrOXHSRfVx1MDAxY1dGM11cdTAwMDYzUprlL+I6Kde6yK27T3K9ilx1MDAwMyTsXHUwMDE2KDCiXHUwMDFhokonXV5ccm7JXHUwMDExQFFbka8gqF/TXHRttJVWM9L//cHpJbLcMWfB4GuVXHUwMDEwz4i8XHUwMDA15bBUK4cvXHUwMDFmXHUwMDE1alfxl5v4mrvPvXhrx8h7eFx1MDAxMlx1MDAxOVx1MDAwN5JcYtBcdTAwMTNFgaCFbeXM/Fg4Pf9vOVhYbpHKWypcdTAwMTgqxVBcblvvjPDe2ZhcdTAwMDfQ91x1MDAxZKXKOvLfSoun8GxcIt1pVVdcdKREzlx1MDAxOa1QXH
UwMDAxxNrkXHUwMDA1XHUwMDFkUZswyylcdTAwMDZCcmesXHUwMDE4czRb44VcXE7M+5I9XHUwMDFm+NiUSFx1MDAxMKzlez7U3fWKrO5cdTAwMWa0+qsrXHUwMDBm/WD1S8XLx1TPXHUwMDA3MC5cdTAwMDGFtvI421vOdTJs48nxXHUwMDAxjFx1MDAwMlx1MDAxOFx1MDAxY9dcdTAwMTLHXHUwMDA1XHUwMDBlXHUwMDBiOXbzc3lCZka3PHeKM4HSk1x1MDAxMplRiHvsmlXjnXLMa+E4UuooXHUwMDAwY7xP78tcdTAwMGaCctt5K51T+Ji5XHUwMDE3f1xmXHUwMDFktYBWx0Hju1LWoI6Y3SBQeDpaXHUwMDBlXHUwMDE3QmnphsJSirRg5JVcdTAwMTVcdTAwMWOpj8FcdTAwMTalntUgcFx1MDAxY1x1MDAwNVx1MDAwNqdnYTyOYp1cZjrCXHUwMDA2jUI0tlxuzdFcdTAwMGInhZjZnuRcdTAwMDZZlZf4llxyXHUwMDA3bkaaXHUwMDFiNe64uVx1MDAwZsnfXHUwMDBiI7zWevTTWItcdTAwMWFuJTg+v98nXYHnnO2iTTHkuo6jXHUwMDEyXHUwMDA1Ye3wYr5cdTAwMTOWSce9XHUwMDA2b8EmqeZStSjjXHUwMDFh20fL5mAoNSGmfEOhguC00kjLsVdcdFx1MDAxMEg4t7nzUsBcdTAwMTJcdTAwMTKd351ze/pC9Vx1MDAxMJpcdTAwMDIgXHUwMDAwgKF3ilx1MDAwNlx1MDAwN5Bwsj6jqWYobjRCj7Y4s8P4vc+F8NP9oIVcdTAwMTFHtlx1MDAxZVx1MDAwNChcIlx1MDAwNCGKXHUwMDBiO96pd1xy6ZOHOP2MXHUwMDBm7mUhXHUwMDFjKDUxN4smXHUwMDA0ROlcdTAwMDXKuaTmueVcdTAwMWPgKFx1MDAxY0+D1IKSMVx1MDAwMVx1MDAwMS7GsKdcdTAwMDY8w1x0h3vPLSGgXHUwMDFi6dmSwiGkZIpcdTAwMDP+4lx1MDAxNs1K+Fx1MDAxNFx1MDAwZauNxklcdTAwMTUvYXGE4FxmO45w2Fx1MDAwMIpcdTAwMTKc8P+LXHUwMDExbtREl7iwx5G5guBGXHUwMDAyXHUwMDA1TCOFNePYZ5j00SSCL1xuvLTvO1x1MDAxZbmoXeTLct5cclx1MDAwMq2HiZVA3JF4XHUwMDE0WVx1MDAxN5Jroa2b2aBcdTAwMTBMIVx1MDAxM0A1XGJUV8CL4Vx1MDAxNqU3zGlcbvpGayOxNlx1MDAwN5W0xN29UtpjXHUwMDA3rFx1MDAxZG5cdTAwMTBcdTAwMTVcdTAwMWJcdTAwMDONklx1MDAwNPmY0qDkrPbwskyAk0pcbpJcdTAwMGIjVHLMxlx1MDAxNlx1MDAwNNqJpXGQWE+kkkiDXHUwMDExhVx1MDAxMmJqJtLuXHUwMDFmrGzv+db2/lbxS/OnOK9u8/vbvCOt8JzhODNI0fFtSjOcduI0PnmU8cpRbJrnXHUwMDE5lcTgKXFcdTAwMTGJgL+Xklx1MDAxOIbi4ZX4hXGhSLnFK3X/68rWXGbcZz+ad2EnLPxoPlx1MDAwNSol/UVv4Vqco1x1MDAxYvOsb1x1MDAxOPdaQqQnXHUwMDA2MzlNem+R7LB0XHUwMDE3d87tXHUwMDE05z+GbEdRrLqklYxcdTAwMTFChGbMhcRcdMFJR6lcdTAwMTLZXHUwMDE4KkPG4Fx1MDAwNWXCXGIqKpiyXHUwMDFh6fHV43xigCuhlVx1MDAxYo9cdTAwMWRcdTAwMDXpKTxnXHUwMDE5i5O5pUOTSM/0pbUh0iM4kkZcdTAwMGYk9aQ1nqdoK8fIreaUl+Bxslx1MDAxYSc9c1x1MDAxMbFWs/F4t+NcdTAwMWa25PX5xYm8cSf17Z+TXCKsyLflnSaeRVx1MDAxZSk/1inFXH
UwMDFjziSg8IHirK3E+45tmjje6Wd0pC+JlCiYnFx1MDAxMWbpXctFXCI3zfG3Tzeb/W8nW+XrrSOzbY0s5j5cdTAwMTNWKs6sdU/V+sbq9SFcdEC6KrlGda5Fwi+65GKe8VOelqyCL0Xj2LCZV+rLT7DFU1xcdDks3Fx1MDAwNd1C0G53WrdvzUhm9WFcdOFcdTAwMTbTY1x1MDAwZsVERlwiUKdoVEZcdTAwMGKY6XRcdTAwMTDOqZkq65hcdTAwMDVaedJcdTAwMDBITUYoXHQqKEa+MznIZ5HZuKGl0myQszb4XHUwMDExKZxcdTAwMDQlNEMt95xZg1x1MDAxYXPUilFcXDqOiJPXortZxVx1MDAxZs4qjvs8ky1cdTAwMTF6XjUpykQhglx1MDAxMWvzOPqEUFx1MDAwYizq797feDg5O6lcdTAwMWXzi/LuwUlcdTAwMWJuXHUwMDFh53k3Nk3ZXGZWUVx1MDAwMrQhY1x1MDAxYilwLcAwYYx/Tu/MaM1cdTAwMDdg3LrG50SEXHUwMDAzoexvVatqN+xUnlJ/Pv2K2J9p188w8lDKKeVcdTAwMGKkVcbLXHUwMDA1ZsEv4mF1x1x1MDAxY4nW5re91lnNlzrto9ynXHUwMDE5afBcZic5ZOaS09rMXGJZXHUwMDE1VrFcYp6Gyj4u1ShdilHK8TRcIoX2IJyEd59VvVBcdTAwMDGp2C5cdTAwMDZcdHsjiX8pXHUwMDE5e2utZlh468i9/7yfc8bVPZUvWdhVPjmbkFJ7vCNHxNyGXtn7eWqvbtvl5s1J5+Z442z/xuQ+xFhrw6TWyiSfbJRKqC1D8zeoRVx1MDAxNVx1MDAxYXlcIlx1MDAxMGK5daNTXHUwMDAyLFKc5Fx1MDAwNsFIXHUwMDE473+jyVx1MDAxN+2i0Ks+Zeb+aEYxrle1Zq1bXHLLP5qR1fSC7vWbXHUwMDBi1MU6tVx1MDAwNFx1MDAwN/pUxSrc5MUuaXFq4ElcdTAwMWKYZcJd/7B5fL95b3pcdTAwMGafz8Jv/PLelbbybsKo/5hLXHUwMDE0eFx1MDAxZibRaMDMS4Bs7VihYJX+OSeP85TJXHUwMDFiO4dcdTAwMWSRepgyPNWYtFopylrO2ryfXHUwMDBlLLvI5Fx1MDAxZq9s93LLlK96lY3mVrO43bs9be80d/xcdTAwMWJsO/P0tzdTwJNcdTAwMDPb7URcdKxcdTAwMDVE27jMbbzpLynvxis9XHUwMDAzr1x1MDAxMKVcdTAwMTSMXHUwMDFir1x1MDAxNoqhRYGhSGJcdTAwMDVcdTAwMTml81x1MDAwYlLZTihlXHUwMDAwwJq0srAgNNNcdTAwMGWxxDo8h2rpjZqw4ILq+XGdv7D27JfAptcgLlxmxTxSUrHF9+295dr49Fxics8lvm/hlVHavDLocfpkVkgk+FNcdTAwMDVcdTAwMWXkwlx1MDAwMrwxQ/GOybBIcFxugLw0XHUwMDE0+mXhfVx1MDAwN0FOXHUwMDFl84Ojo6M9bu9D8vfC5en5ZP+6do7rRepcIq7slaq61+/+lFtcdTAwMDdy4+Fgryb9stN4lo53iHOWUbYxZczTz0hcdTAwMDFcdTAwMTPDmTbWWYhcXFx1MDAwYlx1MDAxOWX3g02BODXuXFxwXHUwMDEyNCj43VRH0Fx0XHUwMDBil4NGfzSjXHUwMDAyy79QbczVmXlUhnqlj1x1MDAwMCY76bmjKIZcdTAwMDVqXHUwMDA0PKzzi+P+/bHba1x1MDAxY/TbX3bF7e3pae5NXHUwMDE2uGaR5/t5QWpYYFiaQZCtKZelo0DN5aRcdTAwMDeEbiUpru/3MdmV7nVUimszrLff1j7Tr5yhZ1x1MDAxZdxEY1RcdTAwMTRcdTAwMDa8UD316bmIOTVGpINMeK/Sxb7TilFYK1x1MDAxMjpSXHUwMDE0XCJcdTAwMWJblDBfxVOHdJFcdT
AwMDP8qk1f50l2XHUwMDFlXHUwMDFhdVx1MDAwYm/3iHJHqUT+RNaWzlx1MDAxOCvcXHUwMDA1tVx1MDAxZfL+yO6CQi9cZlx1MDAxYYVcdTAwMDZ26m1ccn+ujmRcdTAwMWSskrT1sfKqOFx1MDAxM+HYW6C+6kHNXFyXPz9cXO/fnq2uuupG0ZxcdTAwMTd3845cdTAwMDaenPScR543XCLUwy58LymbTepB+jT3I93KUaWsyL+PiOZ/t1CVWaWyXttuVo7FWfvAvbbduuuUVlxyXHUwMDA09r7b3miv3FVcdTAwMWFcdTAwMWL39zlcZtnRk1x0XGIo5dDWXHUwMDE2qbl+vb7/tfZlp3Fw0/blYq/V319ccnKPOcBcciXXXHUwMDAwPK84XGZjjuaMwqS1llx1MDAxMtmIXHUwMDE47dfbhrE6nFx1MDAwNFx1MDAxY1xyj99HXHJ8XHJqnT83oprBXzutXG7eWIPchG9KXHUwMDBmZvUhS4Wg1OTymThT4phcXEBcIpRPLz/Xy4fySFxcm1N3fFg6X3/IvYF6i9Otp1x1MDAxNEpII1x1MDAwNV4gKVx1MDAxMJS+XHUwMDFiXHUwMDE1Xc8m+U2KXHUwMDE0XHUwMDAzTaQ7vVTINI4ke17X/bKa9n/BdqppqPKaemRUXHSr3Wld4sOlXHUwMDFhW8OlwpOFuGJfXalcdTAwMWV0u6RccqJi47TXJJ7SQnOJtEM1rLffPDLpre5mzvglLV9cdTAwMDV2yLonYVx1MDAxZCUw+0WKgq31mtXm+U1cdTAwMDdOLrtH3/j1w8VZcjfvnEKdc0xJIZ6dISPbS3DHLFx1MDAxNYuJNp9cdTAwMTM6m+VT6ebbXlx1MDAwMsBaSi7P625KsyRDXHUwMDBlMSneMuGu1bl+2bmgXHUwMDEzNlq98Km638v+XHUwMDA28Y6S5XDc1oNyOVx1MDAwMlx1MDAwM7Lf4l3QxFx1MDAxYivUg8uwPr5cdTAwMWZcdTAwMDK+UCpcdFx1MDAxODRcdTAwMWYoVrHWLVx1MDAwNNG7KbSahW5cdTAwMWRcdTAwMDfOMyy8wMFTrcJcYjiijVx1MDAxY95cdTAwMWHt3v9zynhbXHSRXGJYXHUwMDFhJY3kXyGhMzeSplx1MDAxYlLOkZRcdTAwMTbBmcJcdTAwMTnjOYxsXHUwMDE4Sp1QLCp6qFx1MDAwN1v5ZFx1MDAwM6WoK1VcdTAwMWNcbkPF08eRXHUwMDE1j7KE+9uMV99cdTAwMTIgXGZcdTAwMDdv5O+4tcT0abyQjEQh96j0JJioXGZJcmeywktsXGLgWXH+TUoxw7lcIlGm+1ZcdTAwMGKF4Wxs7rVy3Cmqw2LcWJ/ed+BJcfJcdTAwMTinn/HRXHUwMDFkN/gh+XtxiFN+Yvkt7anQmVx1MDAxMPOzxfRcdTAwMDDD5WLc8kvIgnD0+DlcdTAwMTdKIFx1MDAxOVx1MDAxYoE4i49cdTAwMWVoY1x0XHUwMDA0Qa1NRltyOk2M1eFcdTAwMTPXtI1PXG7C4WtnXHUwMDFhTU5JXHUwMDFj/qDHQ+2AO4s2XHUwMDBiy6CSS1x1MDAwNziLk0TM1d6y+NbcZVxc0Vx1MDAwNoUy4DTO7DhcImhZYjxcbs8yYynXXHUwMDBiXGbVWHfg9dhzeU+4I6RgtIdcdTAwMGZcdTAwMTUvloLqviW/Xlx1MDAwNODMKO3xXHUwMDE1Ko5cdTAwMDaiZranXHUwMDAwrVx1MDAwNVx1MDAxNFpcZkdcdTAwMTWW2Lsvak9cdTAwMTnH8DJkTFx1MDAxZVuVs+vCKlq5XHUwMDE2VlE1e1qx8kPtUW1QXHJU39lwnLD07PaEkmhFIMkhT1hcdTAwMGJ6uMFRI1tcdTAwMTBnJ/pcdTAwMWb15D3fLVotxU/Ov0BQO9q5XHUwMDE3/V7lsLF5tKa3T8uVa8h9Plx1MDAwMlxib1
x1MDAxOdVwsWg+3Dkx7ICUyjODXHUwMDE4a9D0XHUwMDA0TvEum1x1MDAwNEJcdTAwMWMsLFx1MDAwNVxcU6L8dJRDr23mTshpsIlPSb1dIFx1MDAwMUqsWqOAuutpO8FcdTAwMWbNoNlcdTAwMWHUuif19rb6dO7OLCHKbzI9Si5cdTAwMTKNXHUwMDE244rmXGJcdTAwMDXzXHUwMDA3XHUwMDEzpPt38263XG5cdTAwMTSTRuI0IYxOycZHi+JcdTAwMDYossJSKe1s+Fx1MDAxMUpcdTAwMGUq9kUlXHUwMDE4KVx1MDAwMjhlXHUwMDE5XHUwMDAx51x1MDAxYWbwnVx1MDAxONJcdTAwMDeQdL89WzSqXHUwMDFhKbWTeeRHWVx1MDAwYkBcdTAwMTW4Y3f18+f1iVr9vHd5t3u8Wz1MXHUwMDE3gPh8PZfANc26kJKJ4Imq4oTLXHUwMDE1zmrSmvG6V8utikppXHUwMDA2pPmk5lRcdTAwMTNK8zRiXHUwMDE2Vf13nJawXGI0jX/fdVFxtmRIL7V0VlitXHUwMDEz/mH6KVxuTztOoFQ3XHUwMDFlXHUwMDE1izNiJvNxiinnXHUwMDFkheNaikhcdTAwMTithy+ogL2W3HinnTAziVx1MDAxNFxi1EVe0ujl2K5cdTAwMTPDzFxmL8ScXHUwMDE2pFVpa1x1MDAwNDGTOeItIXVEfYWSU1KM4PBcdTAwMGVcdTAwMDBj1r0kYmblRHxcdTAwMTeoXHUwMDAzuFx1MDAxM4v4+Or6OnCljrk82Gyq8tGx3epWSrlcdTAwMDd4rTVcdTAwMWFcdTAwMTjV4Fx1MDAwNVx1MDAxY1x1MDAxN1x0wlx1MDAxM+lfpOyWXHUwMDBm/H/E2H9tVVTkh9hJpX/pbm9cYpDuVVr2VaTsR7NcdTAwMTL2XG70MJFcdTAwMDL1WpGv/n/elorN6ELWydxUen+SkVx1MDAwMlwiglx1MDAwNyFcdTAwMTZIXGKdPlx1MDAxNebUSD1cdTAwMTUnTux+OpxsXHUwMDAx4Fxys1x1MDAxYVFcdTAwMWNnbpOZeFKo0UhcdTAwMTdcdTAwMWLOQdA2R2kpocox4CjUjUR2kESTZ1x1MDAxYkZcdTAwMWRo8C5MXlc8/1x06vwnqDOqeign1pAwXHUwMDFhXHUwMDEwdvT8xKDvKoG7XGKC7cvN8u1p97HV2V3PfbUnklx1MDAwMCzawlGBXHUwMDE3iLTD28BcIlx1MDAwNHCmvVE2ykXXXHUwMDE5bUxcdTAwMDFgmY1cblx1MDAxOVwii0Sqm1ZcdTAwMDFcbrkoylx1MDAwMiW0QH1cdTAwMDfCjHlznJfISFx1MDAxM9ukv9+IT1x1MDAwNuSnpb3GKDc7XHUwMDExWD+DR5w0i6tP7b8peUi7bqa5YGJymTby90pcdTAwMTKuc1x1MDAxYm7ZftpRle8rO191o/yt94m3XHUwMDFiPPfZ1N5KZqTXSNuNoiXFXHUwMDExu9WCRaHYwnLpfUahnj4tmTru5/OqvFwi2clV5omZOSNcdTAwMDO5XHT1RLKGzUnlOdI16+aGk2JcdTAwMTQndFx1MDAxYqaH/VBcdTAwMTBPeN+uXHUwMDA3T8E7OMT7PbREXG7mXHSaT3XdSq1cdTAwMDa9nrdcdTAwMGV7Wn6/R+FMXHTyXHUwMDBi015cclxi1CZhPnFlOjNcdTAwMTXcJm9cdTAwMGWhxeTN6Y1cdTAwMTFSeCPmTzxPp2I5RzckXHUwMDA0kkG006ZcdTAwMDZcdTAwMGbOjW1cdTAwMGbhkJZo7mmZiU7Khpb859tDeIorMGJcdTAwMTlbvLw7f/TcsT+ccVxuLIs2dlx1MDAwN6CV45TdITgzyFU1XHROgTObtWM3P5dHem4nOVx1MDAwNSSBXCJcdTAwMTLmca5FdlxuNqaWL90yXGZZs8GjTnlLi95jvXpXXH
UwMDFl6V+wP1x1MDAwNMDEVXNAKeCNNmZ+tFv/eSD9+uHXLd44OP9cdTAwMWF2blx1MDAwZde/XuZcdTAwMWXtUPQwQbuOPceGXHKjXHUwMDFk+eE5N9rDqNEuXHUwMDEz7XxKMHtKYp2XVE1cdTAwMTCWsf3fr5ZZ8/Kgbr9Uwlx1MDAxZb+xR3b8qpkmz01eXHUwMDA0R45jNYhFUkpOz1dcdTAwMGZOXHUwMDBmYa96XHUwMDEwdIr7ne/NXHUwMDAzt5776Fx1MDAxNcRwZlx1MDAxMe9RcVuw2lxyu0KkclxmuTO30iEvsZJntCXVvMErimSuNvBcdTAwMGKXSVx1MDAxY7mE1Fx1MDAxYm5cdTAwMWX3T+zK8z28LJtMq2NNUX3ISuZ3YaZvn5Fzs1WGXG7Wk5tcdTAwMDMtwlxiPlx1MDAxNrtcIonV4DHFkdHYbCZPrVx1MDAwNXPCXCJcdTAwMTG0YGin03FcdTAwMTPGd+WYoV1GNa1+J+vfPZm0xXdFXHUwMDFimuZvKzlUuS5ZkHX5YmGRbduQXHUwMDEwOlx1MDAxZIWgckWFXHUwMDFjx1g5XCI1vm2nJIK4U0KOXHUwMDA38M6lXHUwMDE15C5s9Ffs7ufK7Ubj4UuveFx1MDAxZqhwQp+Qk9BuckaiWlx1MDAwMJnSJ5BMcto5QSqLNNry91x1MDAxZLxSdIZccupKJ79W1JwpqqkpZm65W1x1MDAwNCtcdTAwMTggqdBcdTAwMDL/h0TBnsFRQ5uiccpcdTAwMDYhsjdPuIqgr3Dh0Vx1MDAwZbWPQriHWlSOMy1cdTAwMTAwcegoa1x1MDAxNZ+ji14xXCK7XHUwMDE0NKMsypHhXHUwMDE2kVx1MDAxNzEhhDSaS5StMHvjYi1cdTAwMTnqKsRcdO654N7J4aeHOMVQgDllXHUwMDFkwoRxM+9cdTAwMTnA0L7hSlx0pFx1MDAwM8bJkVx1MDAxMFwiQLxjWkZcdTAwMTXAPcWAz2xQeMG4oOVcdTAwMWWvrVx1MDAwNFx1MDAwMyPtjUJY3N6H5O+Fq6P6iaVcdTAwMWKstkbYXHUwMDA1XHUwMDAydHaPoX5wc3p2u3988Ll33N7bvz/8mvdJzHvinp7sXHUwMDAwRvdjQMHDrFBcdTAwMTJVXCK+aFx1MDAwYtnMYDhEWLI++1x1MDAxNFxyiLMsbbzifiNcdDha9qz65lx1MDAxNVx1MDAxN6f3YFx1MDAxMWn44cn6P1x1MDAwNu32UY9cbrc9T4FcdTAwMWZva+HdasqguYp+aO1cIrJsMqIwolx1MDAxOX9/+Pv/XHUwMDAxU3lJdCJ9 - - - - - claim/create an issue- assign yourselfDevelop Your Task Locally- move the issue to Doing- pull master/main to your computer- create a new branch with a good name- create small, well-named commitsRequest a Code Review- push your branch to the group repo- create a PR from your branch main/master- request a review from your colleagues- link your PR to your issue- move your issue to Ready for ReviewConduct a Code Review- discuss your changes in the PR- have a call to talk in person- run the code from your branch all together- decide together if the code needs changeschangeswere requestedyour code was approvedMerge your Changes- 
merge your PR to main/master- move your issue to Done you think youhave finishedyour taskyou are blockedwith your taskAsk for Help... waiting for a team matePair/Group Programming- discuss the problem in your issue- have a call with your classmates- ask a coach for help - push your work to your remote branch- link to your code in your issue- add the help-wanted label to your issue- see if anyone is around on slack- ask for help in the class repoclaim or create another issueget back to work!Un-Blocked- remove the help-wanted label- explain the solution in an issue commentsuccess!claim or create another issue... waiting for help \ No newline at end of file diff --git a/collaboration/learning_goals.md b/collaboration/learning_goals.md deleted file mode 100644 index 11c583d..0000000 --- a/collaboration/learning_goals.md +++ /dev/null @@ -1,5 +0,0 @@ -# Learning Goals - -## Collective - -## Individual diff --git a/collaboration/retrospectives/0_cross_cultural_collaboration.md b/collaboration/retrospectives/0_cross_cultural_collaboration.md deleted file mode 100644 index e69de29..0000000 diff --git a/collaboration/retrospectives/1_problem_identification.md b/collaboration/retrospectives/1_problem_identification.md deleted file mode 100644 index e69de29..0000000 diff --git a/collaboration/retrospectives/2_data_collection.md b/collaboration/retrospectives/2_data_collection.md deleted file mode 100644 index e69de29..0000000 diff --git a/collaboration/retrospectives/3_data_analysis.md b/collaboration/retrospectives/3_data_analysis.md deleted file mode 100644 index e69de29..0000000 diff --git a/collaboration/retrospectives/4_communicating_results.md b/collaboration/retrospectives/4_communicating_results.md deleted file mode 100644 index e69de29..0000000 diff --git a/collaboration/retrospectives/5_final_presentation.md b/collaboration/retrospectives/5_final_presentation.md deleted file mode 100644 index e69de29..0000000 diff --git a/collaboration/retrospectives/README.md 
b/collaboration/retrospectives/README.md deleted file mode 100644 index 08d9b05..0000000 --- a/collaboration/retrospectives/README.md +++ /dev/null @@ -1,3 +0,0 @@ -# Retrospectives - -Retrospective documents for each milestone of the CDSP. diff --git a/collaboration/retrospectives/_template.md b/collaboration/retrospectives/_template.md deleted file mode 100644 index c24b796..0000000 --- a/collaboration/retrospectives/_template.md +++ /dev/null @@ -1,31 +0,0 @@ - - -# Retrospective - -## Stop Doing - -## Continue Doing - -## Start Doing - -## Lessons Learned - ---- - -## Strategy vs. Board - -### What parts of your plan went as expected? - -### What parts of your plan did not work out? - -### Did you need to add things that weren't in your strategy? - -### Or remove extra steps? - ---- - -## Individual Rerospectives - -### Name - - From 9a7de95917a4596c4ce54df3bad1fd5d49216736 Mon Sep 17 00:00:00 2001 From: Aseel Omer Date: Wed, 19 Nov 2025 02:36:58 +0200 Subject: [PATCH 2/3] updating the repo --- 3_experiment/README.md | 0 4_findings/README.md | 0 2 files changed, 0 insertions(+), 0 deletions(-) create mode 100644 3_experiment/README.md create mode 100644 4_findings/README.md diff --git a/3_experiment/README.md b/3_experiment/README.md new file mode 100644 index 0000000..e69de29 diff --git a/4_findings/README.md b/4_findings/README.md new file mode 100644 index 0000000..e69de29 From 3ac07a8dd2f397e7fc7206e27e10c773a919849f Mon Sep 17 00:00:00 2001 From: AseelOmer Date: Wed, 19 Nov 2025 02:45:43 +0200 Subject: [PATCH 3/3] =?UTF-8?q?Add=20README=20for=20ELO2=20=E2=80=93=20GRE?= =?UTF-8?q?EN=20AI=20Project?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This README introduces the ELO2 – GREEN AI Project, detailing its objectives, methods, and findings related to comparing open-source and commercial language models for sustainability. 
--- README.md | 200 ++++++++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 200 insertions(+) create mode 100644 README.md diff --git a/README.md b/README.md new file mode 100644 index 0000000..a5069d4 --- /dev/null +++ b/README.md @@ -0,0 +1,200 @@ +# 🌱 ELO2 – GREEN AI + +***Comparing Commercial and Open-Source Language Models for*** +***Sustainable AI*** + +This repository presents the **ELO2 – GREEN AI Project**, developed +within the **MIT Emerging Talent – AI & ML Program (2025)**. The work +investigates the technical performance, sustainability traits, and +human-perceived quality of **open-source language models** +compared to commercial systems. + +--- + +## 🔍 Project Overview + +### Research Question + +**To what extent can open-source LLMs provide competitive output quality +while operating at significantly lower environmental cost?** + +![image](readme_images/trade-off.png) + +### Motivation + +Large commercial LLMs deliver strong performance but demand substantial +compute and energy. This project examines whether **small, accessible, +and environmentally efficient open-source models**—especially when +enhanced with retrieval and refinement pipelines—can offer practical +alternatives for everyday tasks. + +--- + +## 🧪 Methods + +![image](readme_images/project-timeline.png) + +### 1. Model Families + +The study evaluates several open-source model groups: + +- **Quantized Model:** Mistral-7B (GGUF) +- **Distilled Model:** LaMini-Flan-T5-248M +- **Small Models:** Qwen, Gemma +- **Enhanced Pipelines (applied to all model families):** + - **RAG (Retrieval-Augmented Generation)** + - **Recursive Editing** + - includes AI-based critique and iterative refinement + +These configurations serve as the optimized open-source setups used in +the comparison against commercial models. + +### 2. 
Tasks & Dataset + +Evaluation tasks include: + +- summarization +- factual reasoning +- paraphrasing +- short creative writing +- instruction following +- question answering + +A targeted excerpt from the **Apollo-11 mission transcripts** served as +the central reference text for all evaluation tasks. All prompts were constructed +directly from this shared material. Using a single, consistent source ensured +that every model was tested under identical informational conditions, allowing +clear and fair comparison of output quality and relevance. + +### 3. RAG Pipeline + +Retrieval-Augmented Generation (RAG) was applied to multiple model +families. The pipeline includes: + +- document indexing +- dense similarity retrieval +- context injection through prompt augmentation +- answer synthesis using guidance prompts + +RAG improved factual grounding in nearly all models. + +### 4. Recursive Editing Framework + +A lightweight iterative refinement procedure was implemented: + +1. **Draft Generation:** + The primary model produces an initial output. + +2. **AI-Based Critique:** + A secondary SLM evaluates clarity, accuracy, faithfulness and relevance. + +3. **Refinement Step:** + A revision prompt integrates critique and generates an improved text. + +4. **Stopping Condition:** + The cycle ends after a fixed number of iterations or when critique + stabilizes. + +This approach allowed weaker SLMs to yield higher-quality results +without relying on large models. + +### 5. Environmental Measurement + +Environmental footprint data was captured with **CodeCarbon**, recording: + +- CPU/GPU energy usage +- Carbon emissions +- PUE-adjusted overhead + +These measurements enabled comparison with published metrics for +commercial LLMs. + +### 6. Human Evaluation (Single-Blind) + +A structured Google Form experiment collected: + +- **source identification** (commercial vs. 
open-source) +- **quality ratings** on accuracy, faithfulness, relevance, and clarity + (1–5 scale) + +Outputs were randomized and anonymized to avoid bias. This provided a +perception-based counterpart to technical evaluation. + +### 7. Analysing the Results + +.... + +### 8. Publishing an Article + +.... + +--- + +## 📊 Key Findings + +- FINDING1..... +- FINDING2..... +- FINDING3..... +- FINDING4..... + +--- + +## 🔮 Future Work + +- Evaluate additional open-source model families across diverse tasks +- Test optimized pipelines in specialized domains (medical, legal, technical writing) +- Track carbon footprint across full lifecycle (training to deployment) +- Conduct ablation studies isolating RAG vs. recursive editing contributions + +--- + +## 📢 Communication Strategy + +The research findings will be shared through formats designed for different +audiences and purposes: + +### For Researchers + +A comprehensive research article will document the complete experimental design, +statistical analysis, and implications. + +🔗 **[View Article](link1)** + +### For Practitioners & Educators + +An executive presentation provides a visual overview of the research question, +methodology, and key findings without requiring deep technical background. + +🔗 **[View Presentation](link2)** + +### For the Community + +A public evaluation study invites participation in assessing AI-generated texts. +This crowdsourced data forms a critical component of the research. + +🔗 **[Participate in Study](link3)** + +### For Reproducibility + +All materials (dataset, prompts, model outputs, evaluation scripts, and carbon +tracking logs) are publicly available in this repository. 
+ +🔗 **[Browse Repository](https://github.com/banuozyilmaz2-jpg/ELO2-GREEN-AI)** + +--- + +## 👥 Contributors + +- [Amro Mohamed](https://github.com/Elshikh-Amro) +- [Aseel Omer](https://github.com/AseelOmer) +- [Banu Ozyilmaz](https://github.com/doctorbanu) +- [Caesar Ghazi](https://github.com/CaesarGhazi) +- [Reem Osama](https://github.com/reunicorn1) +- [Safia Gibril Nouman](https://github.com/Safi222) + +--- + +## 🙏 Acknowledgments + +Special thanks to the **MIT Emerging Talent Program** for their guidance and +feedback throughout the project.