1
0

Compare commits

...

3 Commits

Author SHA1 Message Date
15772e30d9 update cpp solution 2023-09-22 22:21:20 +03:00
5a16ea9ff6 remove rust solution 2023-09-22 21:50:48 +03:00
cd9c0f52e9 update lab1a go solution 2023-09-22 19:34:11 +03:00
13 changed files with 96 additions and 515 deletions

View File

@ -10,26 +10,20 @@ class LimitedBuffer {
std::mutex items_mutex;
std::condition_variable items_cv;
int reserved_count;
public:
int estimated_size;
int consumed_count;
int processed_count;
std::mutex processed_mutex;
int estimated_count;
LimitedBuffer(int capacity) {
this->capacity = capacity;
estimated_size = 0;
consumed_count = 0;
processed_count = 0;
estimated_count = 0;
reserved_count = 0;
}
void insert(DataEntry entry);
DataEntry remove();
void update_estimated(int change);
bool consume_estimated();
void mark_processed();
bool is_done();
bool reserve_entry();
};
void LimitedBuffer::insert(DataEntry entry) {
@ -50,27 +44,13 @@ DataEntry LimitedBuffer::remove() {
return entry;
}
void LimitedBuffer::update_estimated(int change) {
    // Adjusts the expected total number of entries by `change` (may be
    // negative). Guarded by processed_mutex, the same lock the other
    // counter methods take before comparing against estimated_size.
    std::unique_lock<std::mutex> guard(processed_mutex);
    estimated_size += change;
}
bool LimitedBuffer::reserve_entry() {
std::unique_lock<std::mutex> guard(items_mutex);
bool LimitedBuffer::consume_estimated() {
std::unique_lock<std::mutex> guard(processed_mutex);
if (consumed_count == estimated_size) {
if (reserved_count == estimated_count) {
return false;
} else {
consumed_count++;
reserved_count++;
return true;
}
}
void LimitedBuffer::mark_processed() {
    // Records that one removed entry has finished processing;
    // is_done() compares processed_count against estimated_size.
    std::unique_lock<std::mutex> guard(processed_mutex);
    processed_count++;
}
bool LimitedBuffer::is_done() {
return estimated_size == processed_count;
}

View File

@ -11,57 +11,32 @@
using namespace std;
using json = nlohmann::json;
const int inputBufferSize = 10;
const int inputMonitorCount = 3;
const int outputBufferSize = 10;
const int outputMonitorCount = 3;
const int bufferSize = 10;
const int threadCount = 1;
#include "common.h"
#include "limited_buffer.cpp"
bool filterEntry(DataEntry *entry) {
void processEntry(DataEntry *entry, vector<DataEntry> *entries, mutex *outputMutex) {
this_thread::sleep_for(chrono::milliseconds(100 + entry->criteria));
return entry->sugar > entry->criteria;
}
void outputEntry(vector<DataEntry> *entries, DataEntry *entry) {
this_thread::sleep_for(chrono::milliseconds(200));
cout << "Output: " << entry->name << endl;
entries->push_back(*entry);
sort(entries->begin(), entries->end(), [](DataEntry &a, DataEntry &b) {
return a.sugar < b.sugar;
});
}
void filterEntries(LimitedBuffer *inputBuffer, LimitedBuffer *outputBuffer) {
while (!inputBuffer->is_done()) {
if (inputBuffer->consume_estimated()) {
DataEntry entry = inputBuffer->remove();
cout << "Started to filter: " << entry.name << endl;
bool is_filtered = filterEntry(&entry);
cout << "Finished to filter: " << entry.name << endl;
if (is_filtered) {
outputBuffer->update_estimated(+1);
outputBuffer->insert(entry);
}
inputBuffer->mark_processed();
}
if (entry->sugar > entry->criteria) {
outputMutex->lock();
cout << "Output: " << entry->name << endl;
// TODO: make this a sorted add
entries->push_back(*entry);
sort(entries->begin(), entries->end(), [](DataEntry &a, DataEntry &b) {
return a.sugar < b.sugar;
});
outputMutex->unlock();
}
}
void outputEntries(LimitedBuffer *inputBuffer, LimitedBuffer *outputBuffer, vector<DataEntry> *outputList, mutex *outputMutex) {
while (!inputBuffer->is_done() || !outputBuffer->is_done()) {
if (outputBuffer->consume_estimated()) {
DataEntry entry = outputBuffer->remove();
cout << "Started to output: " << entry.name << endl;
outputEntry(outputList, &entry);
cout << "Finished to output: " << entry.name << endl;
outputBuffer->mark_processed();
}
void processEntries(LimitedBuffer *monitor, vector<DataEntry> *outputList, mutex *outputMutex) {
while (monitor->reserve_entry()) {
DataEntry entry = monitor->remove();
cout << "Started to filter: " << entry.name << endl;
processEntry(&entry, outputList, outputMutex);
cout << "Finished to filter: " << entry.name << endl;
}
}
@ -89,22 +64,17 @@ int main(int argc, char **argv) {
vector<DataEntry> outputList;
mutex outputListMutex;
LimitedBuffer inputBuffer(inputBufferSize);
LimitedBuffer outputBuffer(outputBufferSize);
inputBuffer.update_estimated(entries.size());
LimitedBuffer monitor(bufferSize);
monitor.estimated_count = entries.size();
vector<thread> threads;
for (int i = 0; i < inputMonitorCount; i++) {
threads.push_back(thread(filterEntries, &inputBuffer, &outputBuffer));
}
for (int i = 0; i < outputMonitorCount; i++) {
threads.push_back(thread(outputEntries, &inputBuffer, &outputBuffer, &outputList, &outputListMutex));
for (int i = 0; i < threadCount; i++) {
threads.push_back(thread(processEntries, &monitor, &outputList, &outputListMutex));
}
for (int i = 0; i < entries.size(); i++) {
inputBuffer.insert(entries[i]);
monitor.insert(entries[i]);
}
for (int i = 0; i < threads.size(); i++) {
threads[i].join();
}
@ -130,9 +100,3 @@ int main(int argc, char **argv) {
cout << "Finished" << endl;
return 0;
}
void execute(const string &name) {
    // Emits three numbered progress lines, each tagged with the
    // caller-supplied name (output is byte-identical to the original).
    for (const char *step : {"one", "two", "three"}) {
        cout << name << ": " << step << endl;
    }
}

View File

@ -1,5 +1,5 @@
# Lab1
```shell
go run limited_buffer.go main.go ../IF-1-1_PuzonasR_L1_dat_1.json IF-1-1_PuzonasR_L1_rez.txt
go run sorted_list.go limited_buffer.go main.go ../IF-1-1_PuzonasR_L1_dat_1.json IF-1-1_PuzonasR_L1_rez.txt
```

View File

@ -10,21 +10,12 @@ type LimitedBuffer struct {
mutex *sync.Mutex
updateCond *sync.Cond
estimatedSize int
estimatedSizeMutex *sync.Mutex
consumedCount int
consumedMutex *sync.Mutex
processedCount int
processedMutex *sync.Mutex
estimatedCount int
reservedCount int
}
func makeLimitedBuffer(bufferSize int) LimitedBuffer {
var container = make([]DataEntry, bufferSize)
var estimatedSizeMutex = sync.Mutex{}
var processedMutex = sync.Mutex{}
var consumedMutex = sync.Mutex{}
var itemsMutex = sync.Mutex{}
var cond = sync.NewCond(&itemsMutex)
@ -32,9 +23,6 @@ func makeLimitedBuffer(bufferSize int) LimitedBuffer {
items: container,
mutex: &itemsMutex,
updateCond: cond,
estimatedSizeMutex: &estimatedSizeMutex,
processedMutex: &processedMutex,
consumedMutex: &consumedMutex,
}
}
@ -63,43 +51,14 @@ func (buffer *LimitedBuffer) Remove() DataEntry {
return item
}
func (buffer *LimitedBuffer) IsDone() bool {
return buffer.estimatedSize == buffer.processedCount
}
func (buffer *LimitedBuffer) ReserveEntry() bool {
buffer.mutex.Lock()
defer buffer.mutex.Unlock()
func (buffer *LimitedBuffer) WaitUntilDone() {
for !buffer.IsDone() {}
}
func (buffer *LimitedBuffer) ConsumeEstimatedEntry() bool {
buffer.estimatedSizeMutex.Lock()
defer buffer.estimatedSizeMutex.Unlock()
if (buffer.estimatedSize == buffer.consumedCount) {
if buffer.estimatedCount == buffer.reservedCount {
return false
} else {
buffer.consumedCount += 1
buffer.reservedCount++
return true
}
}
func (buffer *LimitedBuffer) IsEstimatedEmpty() bool {
buffer.estimatedSizeMutex.Lock()
defer buffer.estimatedSizeMutex.Unlock()
return buffer.estimatedSize == 0
}
func (buffer *LimitedBuffer) UpdateEstimated(change int) {
buffer.estimatedSizeMutex.Lock()
defer buffer.estimatedSizeMutex.Unlock()
buffer.estimatedSize += change
}
func (buffer *LimitedBuffer) MarkAsProcessed() {
buffer.processedMutex.Lock()
defer buffer.processedMutex.Unlock()
buffer.processedCount += 1
}

View File

@ -5,16 +5,12 @@ import (
"fmt"
"log"
"os"
"sort"
"sync"
"time"
)
const inputBufferSize = 10
const inputMonitorCount = 3
const outputBufferSize = 10
const outputMonitorCount = 1
const bufferSize = 10
const threadCount = 3
type DataEntry struct {
Name string `json:"name"`
@ -22,48 +18,24 @@ type DataEntry struct {
Criteria int `json:"criteria"`
}
func filterEntry(entry DataEntry) bool {
func processEntry(entry DataEntry, outputEntries *SortedList, outputMutex *sync.Mutex) {
time.Sleep(time.Millisecond * 100 + time.Millisecond * time.Duration(entry.Criteria))
return entry.Sugar > float32(entry.Criteria)
if entry.Sugar > float32(entry.Criteria) {
outputMutex.Lock()
fmt.Println("Output:", entry)
outputEntries.Append(entry)
outputMutex.Unlock()
}
}
func outputEntry(outputEntries *[]DataEntry, entry DataEntry) {
time.Sleep(time.Millisecond * 200)
fmt.Println("Output:", entry)
*outputEntries = append(*outputEntries, entry)
sort.Slice(*outputEntries, func(i, j int) bool {
return (*outputEntries)[i].Sugar < (*outputEntries)[j].Sugar
})
}
func filterEntries(inputBuffer *LimitedBuffer, outputBuffer *LimitedBuffer) {
for inputBuffer.ConsumeEstimatedEntry() {
entry := inputBuffer.Remove()
func processEntries(monitor *LimitedBuffer, outputEntries *SortedList, outputMutex *sync.Mutex, waitGroup *sync.WaitGroup) {
for monitor.ReserveEntry() {
entry := monitor.Remove()
fmt.Println("Started to filter:", entry)
isFiltered := filterEntry(entry)
processEntry(entry, outputEntries, outputMutex)
fmt.Println("Finished to filter:", entry)
if (isFiltered) {
outputBuffer.UpdateEstimated(+1)
outputBuffer.Insert(entry)
}
inputBuffer.MarkAsProcessed()
}
}
func outputEntries(outputBuffer *LimitedBuffer, inputBuffer *LimitedBuffer, outputList *[]DataEntry, outputMutex *sync.Mutex) {
for !inputBuffer.IsDone() {
for outputBuffer.ConsumeEstimatedEntry() {
entry := outputBuffer.Remove()
outputMutex.Lock()
fmt.Println("Started to output:", entry)
outputEntry(outputList, entry)
fmt.Println("Finished to output:", entry)
outputMutex.Unlock()
outputBuffer.MarkAsProcessed()
}
}
waitGroup.Done()
}
func main() {
@ -85,25 +57,22 @@ func main() {
log.Fatal(err)
}
outputList := []DataEntry{}
inputBuffer := makeLimitedBuffer(inputBufferSize)
outputBuffer := makeLimitedBuffer(outputBufferSize)
outputList := makeSortedList(len(entries))
outputMutex := sync.Mutex{}
inputBuffer.UpdateEstimated(len(entries))
waitGroup := sync.WaitGroup{}
for i := 0; i < inputMonitorCount; i++ {
go filterEntries(&inputBuffer, &outputBuffer)
}
for i := 0; i < outputMonitorCount; i++ {
go outputEntries(&outputBuffer, &inputBuffer, &outputList, &outputMutex)
monitor := makeLimitedBuffer(bufferSize)
monitor.estimatedCount = len(entries)
for i := 0; i < threadCount; i++ {
go processEntries(&monitor, &outputList, &outputMutex, &waitGroup)
waitGroup.Add(1)
}
for _, entry := range entries {
inputBuffer.Insert(entry);
monitor.Insert(entry);
}
inputBuffer.WaitUntilDone()
outputBuffer.WaitUntilDone()
waitGroup.Wait()
outputFile, err := os.OpenFile(outputFilename, os.O_TRUNC | os.O_CREATE | os.O_WRONLY, 0644)
if err != nil {
@ -111,7 +80,7 @@ func main() {
}
defer outputFile.Close()
outputListBytes, err := json.MarshalIndent(outputList, "", "\t")
outputListBytes, err := json.MarshalIndent(outputList.GetSlice(), "", "\t")
if err != nil {
log.Fatal(err)
}

31
lab1a/go/sorted_list.go Normal file
View File

@ -0,0 +1,31 @@
package main
// SortedList keeps up to a fixed number of DataEntry values ordered by
// ascending Sugar, maintained with an insertion step on every Append.
type SortedList struct {
	items []DataEntry // backing storage, allocated once at full capacity
	count int         // number of leading slots of items currently in use
}

// makeSortedList returns an empty list able to hold `capacity` entries.
func makeSortedList(capacity int) SortedList {
	var items = make([]DataEntry, capacity)
	return SortedList{
		items: items,
	}
}

// Append inserts item in ascending-Sugar order; panics when the list is full.
// Fix: the bubble-down loop previously broke only on a strictly-greater
// neighbour, so an item whose Sugar equalled its neighbour's kept swapping
// past it — wasted swaps, and ties ended up in reverse arrival order.
// Breaking on >= stops at the upper bound, keeping ties stable.
func (self *SortedList) Append(item DataEntry) {
	if len(self.items) == self.count {
		panic("SortedList is full")
	}
	self.items[self.count] = item
	self.count++
	for i := self.count - 1; i >= 1; i-- {
		if self.items[i].Sugar >= self.items[i-1].Sugar {
			break
		}
		self.items[i], self.items[i-1] = self.items[i-1], self.items[i] // Swap elements [i] and [i-1]
	}
}

// GetSlice returns the in-use, sorted prefix of the backing array.
func (self *SortedList) GetSlice() []DataEntry {
	return self.items[:self.count]
}

View File

@ -1 +0,0 @@
target

122
lab1a/rust/Cargo.lock generated
View File

@ -1,122 +0,0 @@
# This file is automatically @generated by Cargo.
# It is not intended for manual editing.
version = 3
[[package]]
name = "a-rust"
version = "0.1.0"
dependencies = [
"anyhow",
"crossbeam-channel",
"crossbeam-utils",
"serde_json",
]
[[package]]
name = "anyhow"
version = "1.0.75"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a4668cab20f66d8d020e1fbc0ebe47217433c1b6c8f2040faf858554e394ace6"
[[package]]
name = "cfg-if"
version = "1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
[[package]]
name = "crossbeam-channel"
version = "0.5.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a33c2bf77f2df06183c3aa30d1e96c0695a313d4f9c453cc3762a6db39f99200"
dependencies = [
"cfg-if",
"crossbeam-utils",
]
[[package]]
name = "crossbeam-utils"
version = "0.8.16"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5a22b2d63d4d1dc0b7f1b6b2747dd0088008a9be28b6ddf0b1e7d335e3037294"
dependencies = [
"cfg-if",
]
[[package]]
name = "itoa"
version = "1.0.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "af150ab688ff2122fcef229be89cb50dd66af9e01a4ff320cc137eecc9bacc38"
[[package]]
name = "proc-macro2"
version = "1.0.67"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3d433d9f1a3e8c1263d9456598b16fec66f4acc9a74dacffd35c7bb09b3a1328"
dependencies = [
"unicode-ident",
]
[[package]]
name = "quote"
version = "1.0.33"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5267fca4496028628a95160fc423a33e8b2e6af8a5302579e322e4b520293cae"
dependencies = [
"proc-macro2",
]
[[package]]
name = "ryu"
version = "1.0.15"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1ad4cc8da4ef723ed60bced201181d83791ad433213d8c24efffda1eec85d741"
[[package]]
name = "serde"
version = "1.0.188"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cf9e0fcba69a370eed61bcf2b728575f726b50b55cba78064753d708ddc7549e"
dependencies = [
"serde_derive",
]
[[package]]
name = "serde_derive"
version = "1.0.188"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4eca7ac642d82aa35b60049a6eccb4be6be75e599bd2e9adb5f875a737654af2"
dependencies = [
"proc-macro2",
"quote",
"syn",
]
[[package]]
name = "serde_json"
version = "1.0.107"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6b420ce6e3d8bd882e9b243c6eed35dbc9a6110c9769e74b584e0d68d1f20c65"
dependencies = [
"itoa",
"ryu",
"serde",
]
[[package]]
name = "syn"
version = "2.0.36"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "91e02e55d62894af2a08aca894c6577281f76769ba47c94d5756bec8ac6e7373"
dependencies = [
"proc-macro2",
"quote",
"unicode-ident",
]
[[package]]
name = "unicode-ident"
version = "1.0.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3354b9ac3fae1ff6755cb6db53683adb661634f67557942dea4facebec0fee4b"

View File

@ -1,12 +0,0 @@
[package]
name = "a-rust"
version = "0.1.0"
edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
anyhow = "1.0.75"
crossbeam-channel = "0.5.8"
crossbeam-utils = "0.8.16"
serde_json = "1.0.107"

View File

@ -1,3 +0,0 @@
# Lab1a
THIS IS NOT FINISHED

View File

@ -1,7 +0,0 @@
/// A single data record processed by the filtering pipeline.
#[derive(Debug)]
pub struct DataEntry {
    /// Display name used in the progress log lines.
    pub name: String,
    /// Sugar value; an entry passes the filter when it exceeds `criteria`.
    pub sugar: f32,
    /// Per-entry threshold (also scales the simulated work delay).
    pub criteria: i32
}

View File

@ -1,59 +0,0 @@
use crate::common::DataEntry;
/// Bounded FIFO of entries plus the bookkeeping counters the worker
/// threads use to decide when the pipeline stage is finished.
pub struct LimitedBuffer {
    items: Vec<DataEntry>,
    capacity: usize,
    estimated_size: i32,
    consumed_count: u32,
    processed_count: u32
}

impl LimitedBuffer {
    /// Creates an empty buffer that will hold at most `capacity` entries.
    pub fn new(capacity: usize) -> LimitedBuffer {
        LimitedBuffer {
            items: Vec::with_capacity(capacity),
            capacity,
            estimated_size: 0,
            consumed_count: 0,
            processed_count: 0,
        }
    }

    /// Appends `entry` unless the buffer is already at capacity.
    /// Returns `true` when the entry was stored.
    pub fn try_insert(&mut self, entry: DataEntry) -> bool {
        let has_room = self.items.len() < self.capacity;
        if has_room {
            self.items.push(entry);
        }
        has_room
    }

    /// Pops the oldest entry (FIFO order), or `None` when empty.
    pub fn remove(&mut self) -> Option<DataEntry> {
        match self.items.is_empty() {
            true => None,
            false => Some(self.items.remove(0)),
        }
    }

    /// True once every estimated entry has been marked processed.
    pub fn is_done(&self) -> bool {
        self.processed_count == self.estimated_size as u32
    }

    /// Claims one of the estimated entries for consumption; returns
    /// `false` once all estimated entries are already claimed.
    pub fn consume_estimated(&mut self) -> bool {
        if self.consumed_count == self.estimated_size as u32 {
            false
        } else {
            self.consumed_count += 1;
            true
        }
    }

    /// Adjusts the expected number of entries by `change` (may be negative).
    pub fn update_estimated(&mut self, change: i32) {
        self.estimated_size += change;
    }

    /// Records that one consumed entry has finished processing.
    pub fn mark_processed(&mut self) {
        self.processed_count += 1;
    }
}

View File

@ -1,118 +0,0 @@
use std::{env::args, process::exit, thread, time::Duration, sync::{Arc, Mutex}};
use anyhow::Result;
mod limited_buffer;
mod common;
use common::DataEntry;
use limited_buffer::LimitedBuffer;
// This was a mistake
// NOTE(review): tuning knobs for the two pipeline stages. The input buffer
// is deliberately tiny — presumably to exercise try_insert's full-buffer
// path; confirm before relying on it (5 hard-coded entries are fed in).
const INPUT_BUFFER_SIZE: usize = 2;
// Number of threads running filter_entries.
const INPUT_MONITOR_COUNT: i32 = 2;
// Capacity of the output buffer (the output stage is commented out in main).
const OUTPUT_BUFFER_SIZE: usize = 10;
// Number of output threads; their spawn loop is commented out in main().
const OUTPUT_MONITOR_COUNT: i32 = 1;
/// Simulates per-entry work (100 ms plus one ms per criteria unit), then
/// reports whether the entry passes the sugar-vs-criteria filter.
fn filter_entry(entry: &DataEntry) -> bool {
    let delay_ms = 100 + entry.criteria as u64;
    thread::sleep(Duration::from_millis(delay_ms));
    entry.sugar > entry.criteria as f32
}
/// Appends `entry` to `output_list`, keeping the list sorted by ascending
/// sugar, after a simulated 200 ms output delay.
fn output_entry(output_list: &mut Vec<DataEntry>, entry: DataEntry) {
    thread::sleep(Duration::from_millis(200));
    // Fix: the list is already sorted, so re-sorting the whole vector after
    // every push (O(n log n) per insert) was wasted work. Binary-search the
    // upper bound of equal sugar values and insert there — same final order
    // as the original stable sort, O(log n) search plus one shift.
    let pos = output_list.partition_point(|e| e.sugar.total_cmp(&entry.sugar).is_le());
    output_list.insert(pos, entry);
}
/// Worker loop: claims one estimated entry at a time from `input`, runs the
/// filter, and marks it processed. The hand-off to `output` is stubbed out.
fn filter_entries(input: Arc<Mutex<LimitedBuffer>>, output: Arc<Mutex<LimitedBuffer>>) {
    while !input.lock().unwrap().is_done() {
        if input.lock().unwrap().consume_estimated() {
            // NOTE(review): the mutex is released between consume_estimated()
            // and remove(), so another worker can drain the buffer in the
            // gap. The None + `continue` path then skips mark_processed()
            // even though consume_estimated() already bumped its counter, so
            // is_done() may never become true — verify before reuse.
            let entry = input.lock().unwrap().remove();
            if entry.is_none() { continue; }
            let entry = entry.unwrap();
            println!("Started to filter: {}", entry.name);
            let is_filtered = filter_entry(&entry);
            println!("Finished to filter: {}", entry.name);
            if is_filtered {
                // Output hand-off intentionally disabled (unfinished stage).
                // output.lock().unwrap().update_estimated(1);
                // while !output.lock().unwrap().try_insert(entry) {}
            }
            input.lock().unwrap().mark_processed()
        }
    }
}
/// Output worker: drains `input` until it reports done, appending each entry
/// to the shared sorted output list. Dead code — its spawn loop in main() is
/// commented out.
/// NOTE(review): the name is misspelled ("ouput"), and unlike
/// filter_entries the return value of consume_estimated() is ignored here —
/// confirm the intended counter accounting before re-enabling this path.
fn ouput_entries(input: Arc<Mutex<LimitedBuffer>>, output: Arc<Mutex<LimitedBuffer>>, output_list: Arc<Mutex<Vec<DataEntry>>>) {
    while !input.lock().unwrap().is_done() {
        let entry = input.lock().unwrap().remove();
        if entry.is_none() { continue; }
        let entry = entry.unwrap();
        output.lock().unwrap().consume_estimated();
        let entry_name = entry.name.clone();
        println!("Started to output: {}", entry_name);
        {
            // Scope the lock so it is released before the "Finished" log line.
            let mut output_list = output_list.lock().unwrap();
            output_entry(&mut output_list, entry);
        }
        println!("Finished to output: {}", entry_name);
        output.lock().unwrap().mark_processed()
    }
}
/// Entry point: validates CLI arguments, builds a hard-coded data set,
/// spawns the filter workers, feeds the input buffer, and joins the workers.
/// The file-reading and output stages are unfinished; their scaffolding is
/// kept commented out below.
fn main() -> Result<()> {
    let args = args().collect::<Vec<_>>();
    if args.len() != 3 {
        eprintln!("Usage: {} <data-file> <output-file>", args[0]);
        exit(-1)
    }
    // NOTE(review): <data-file> is validated but never read — these entries
    // are hard-coded placeholders.
    let entries = vec![
        DataEntry{ name: "foo1".into(), sugar: 100.0, criteria: 100 },
        DataEntry{ name: "foo2".into(), sugar: 100.0, criteria: 100 },
        DataEntry{ name: "foo3".into(), sugar: 100.0, criteria: 100 },
        DataEntry{ name: "foo4".into(), sugar: 100.0, criteria: 100 },
        DataEntry{ name: "foo5".into(), sugar: 100.0, criteria: 100 }
    ];
    let mut input_threads = vec![];
    // let mut output_threads = vec![];
    // let output_list = Arc::new(Mutex::new(Vec::new()));
    let input_buffer = Arc::new(Mutex::new(LimitedBuffer::new(INPUT_BUFFER_SIZE)));
    let output_buffer = Arc::new(Mutex::new(LimitedBuffer::new(OUTPUT_BUFFER_SIZE)));
    // Tell the buffer how many entries to expect; is_done()/consume_estimated()
    // compare their counters against this total.
    input_buffer.lock().unwrap().update_estimated(entries.len() as i32);
    for _ in 0..INPUT_MONITOR_COUNT {
        // Re-bind clones of the Arcs so the spawned closure owns its handles.
        let input_buffer = input_buffer.clone();
        let output_buffer = output_buffer.clone();
        input_threads.push(thread::spawn(|| filter_entries(input_buffer, output_buffer)));
    }
    // for _ in 0..OUTPUT_MONITOR_COUNT {
    //     let input_buffer = input_buffer.clone();
    //     let output_buffer = output_buffer.clone();
    //     let output_list = output_list.clone();
    //     output_threads.push(thread::spawn(|| ouput_entries(input_buffer, output_buffer, output_list)));
    // }
    for entry in entries {
        // NOTE(review): try_insert's bool result is discarded; with
        // INPUT_BUFFER_SIZE = 2 and 5 entries, inserts into a full buffer
        // are silently dropped unless workers drain it fast enough — verify.
        input_buffer.lock().unwrap().try_insert(entry);
    }
    for handle in input_threads {
        handle.join().expect("Failed to join input thread");
    }
    // for handle in output_threads {
    //     handle.join().expect("Failed to join output thread");
    // }
    println!("Finished");
    Ok(())
}