diff --git a/components/free_rtos/ethernet_industry/CoE/eth_ecat_pdo_fmmu.cpp b/components/free_rtos/ethernet_industry/CoE/eth_ecat_pdo_fmmu.cpp index 41d527b..205cd9e 100644 --- a/components/free_rtos/ethernet_industry/CoE/eth_ecat_pdo_fmmu.cpp +++ b/components/free_rtos/ethernet_industry/CoE/eth_ecat_pdo_fmmu.cpp @@ -61,25 +61,35 @@ void EthEcatPdoFMMU::wait_op() { } void EthEcatPdoFMMU::process_write_queue(uint8_t* process_data, uint32_t len) { - pdo_promise::IPDOPromise *next = write_queue_; + custom_promise::IPromise *next; + + mutex_write_.lock(); + + next = queue_write_.get_next(); + queue_write_.detach(); + + mutex_write_.unlock(); while(next != nullptr) { next->set_value(process_data, len); next = next->get_next(); } - - write_queue_ = nullptr; } void EthEcatPdoFMMU::process_read_queue(uint8_t* process_data, uint32_t len) { - pdo_promise::IPDOPromise *next = read_queue_; + custom_promise::IPromise *next; + + mutex_read_.lock(); + + next = queue_read_.get_next(); + queue_read_.detach(); + + mutex_read_.unlock(); while(next != nullptr) { next->set_value(process_data, len); next = next->get_next(); } - - read_queue_ = nullptr; } void EthEcatPdoFMMU::process() { @@ -88,8 +98,13 @@ void EthEcatPdoFMMU::process() { uint32_t logical_full_length_write = ecat_buffer_.get_fmmu_global_properties().logical_full_length_write; uint32_t logical_full_length_read = ecat_buffer_.get_fmmu_global_properties().logical_full_length_read; std::vector process_data(std::min(logical_full_length_write, logical_full_length_read)); + free_rtos::Timer& ecat_timer = ecat_buffer_.get_ecat().get_ecat_timer(); + + ecat_timer.Start(); while(1) { + ecat_timer.Wait(); + read(0, process_data); /* for(uint8_t& byte : process_data) { diff --git a/components/free_rtos/ethernet_industry/CoE/eth_ecat_pdo_fmmu.hpp b/components/free_rtos/ethernet_industry/CoE/eth_ecat_pdo_fmmu.hpp index 05a8342..93bf7e5 100644 --- a/components/free_rtos/ethernet_industry/CoE/eth_ecat_pdo_fmmu.hpp +++ 
b/components/free_rtos/ethernet_industry/CoE/eth_ecat_pdo_fmmu.hpp @@ -9,9 +9,10 @@ #define FREE_RTOS_ETHERNET_INDUSTRY_COE_ETH_ECAT_PDO_FMMU_HPP_ #include "semaphore/semaphore.hpp" +#include "mutex/mutex.hpp" #include "ethernet_industry/eth_ecat_buffer.hpp" -#include "ethernet_industry/CoE/eth_ecat_pdo_promise.hpp" +#include "ethernet_industry/eth_ecat_custom_promise.hpp" namespace free_rtos { @@ -46,15 +47,14 @@ private: ecat_buffer::EcatBufferSlave& buffer_slave_; }; - +/* // Функтор для обхода и передачи датаграмм в custom_tuple struct DatagramFunctor { - datagram::EcatTelegram& telegram; + telegram::EcatTelegram& telegram; size_t number_of_slaves; datagram::IEcatDatagram *next; - template - void operator()(DatagramT& datagram) { + void operator()(datagram::IEcatDatagram& datagram) { if(next != nullptr) { datagram + *next; } @@ -72,7 +72,7 @@ struct DatagramFunctor { } while(next->get_all_wkc() < number_of_slaves); } }; - +*/ class EthEcatPdoFMMU { public: EthEcatPdoFMMU(ecat_buffer::EthEcatBuffer& ecat_buffer): ecat_buffer_{ecat_buffer} { } @@ -82,28 +82,46 @@ public: template void pdo_write(address::Offset offset, DataTypes&... data) { - pdo_promise::PDOWritePromise promise{offset, data...}; + custom_promise::WritePromise promise{offset, data...}; - write_queue_ = &promise; // TODO: Доделать добавление в очередь более одного элемента + mutex_write_.lock(); + //last_write_ = (*last_write_) + &promise; + queue_write_ + promise; + mutex_write_.unlock(); promise.get_future().get(); } template void pdo_read(address::Offset offset, DataTypes&... 
data) { - pdo_promise::PDOReadPromise promise{offset, data...}; + custom_promise::ReadPromise promise{offset, data...}; - read_queue_ = &promise; // TODO: Доделать добавление в очередь более одного элемента + mutex_read_.lock(); + //last_read_ = (*last_read_) + &promise; + queue_read_ + promise; + mutex_read_.unlock(); promise.get_future().get(); } - void pdo_write_async(pdo_promise::IPDOPromise& promise) { - write_queue_ = &promise; // TODO: Доделать добавление в очередь более одного элемента + + void pdo_write_async(custom_promise::IPromise& promise) { + mutex_write_.lock(); + + //last_write_ = (*last_write_) + &promise; + queue_write_ + promise; + + mutex_write_.unlock(); } - void pdo_read_async(pdo_promise::IPDOPromise& promise) { - read_queue_ = &promise; // TODO: Доделать добавление в очередь более одного элемента + + void pdo_read_async(custom_promise::IPromise& promise) { + mutex_read_.lock(); + + //last_read_ = (*last_read_) + &promise; + queue_read_ + promise; + + mutex_read_.unlock(); } private: @@ -111,14 +129,70 @@ private: std::vector pdo_fmmu_slaves_; - pdo_promise::IPDOPromise *write_queue_{nullptr}; - pdo_promise::IPDOPromise *read_queue_{nullptr}; + Mutex mutex_write_; + Mutex mutex_read_; + + custom_promise::WritePromise<> queue_write_{0}; + custom_promise::ReadPromise<> queue_read_{0}; + + //custom_promise::IPromise *last_write_{&queue_write_}; + //custom_promise::IPromise *last_read_{&queue_read_}; void wait_op(); void process_write_queue(uint8_t* process_data, uint32_t len); void process_read_queue(uint8_t* process_data, uint32_t len); + template + void write(address::Offset offset, DataTypes&... 
data) { + using TDatagram = datagram::EcatDatagram; + + telegram::EcatTelegram& telegram = ecat_buffer_.get_ecat().get_telegram(); + ecat_buffer::FMMUGlobalProperties& fmmu_global_properties = ecat_buffer_.get_fmmu_global_properties(); + address::Logical logical = fmmu_global_properties.logical_start_address + offset; + TDatagram datagram{ {{logical}}, data... }; + + do { + telegram.transfer(datagram); + } while(datagram.get_wkc() < 0x0001); + } + + template + void read(address::Offset offset, DataTypes&... data) { + using TDatagram = datagram::EcatDatagram; + + telegram::EcatTelegram& telegram = ecat_buffer_.get_ecat().get_telegram(); + ecat_buffer::FMMUGlobalProperties& fmmu_global_properties = ecat_buffer_.get_fmmu_global_properties(); + address::Logical logical = fmmu_global_properties.logical_start_address + fmmu_global_properties.logical_full_length_write + offset; + TDatagram datagram{ {{logical}}, data... }; + + do { + telegram.transfer(datagram); + } while(datagram.get_wkc() < 0x0001); + } + + template + void read_write(address::Offset offset_read, address::Offset offset_write, DataTypes&... data) { + using TDatagramRead = datagram::EcatDatagram; + using TDatagramWrite = datagram::EcatDatagram; + + telegram::EcatTelegram& telegram = ecat_buffer_.get_ecat().get_telegram(); + ecat_buffer::FMMUGlobalProperties& fmmu_global_properties = ecat_buffer_.get_fmmu_global_properties(); + + address::Logical logical_read = fmmu_global_properties.logical_start_address + fmmu_global_properties.logical_full_length_write + offset_read; + TDatagramRead datagram_read{ {{logical_read}}, data... }; + + address::Logical logical_write = fmmu_global_properties.logical_start_address + offset_write; + TDatagramWrite datagram_write{ {{logical_write}}, data... }; + + datagram_read + datagram_write; + + do { + telegram.transfer(datagram_read); + } while(datagram_read.get_all_wkc() < 0x0001); + } + +/* // Запись PDO замапленных каждым слейвом template void write(DataTypes&... 
data) { @@ -165,55 +239,7 @@ private: for_each_reverse(datagram_tuple, functor); } - - template - void write(address::Offset offset, DataTypes&... data) { - using TDatagram = datagram::EcatDatagram; - - datagram::EcatTelegram& telegram = ecat_buffer_.get_ecat().get_telegram(); - ecat_buffer::FMMUGlobalProperties& fmmu_global_properties = ecat_buffer_.get_fmmu_global_properties(); - address::Logical logical = fmmu_global_properties.logical_start_address + offset; - TDatagram datagram{ {{logical}}, data... }; - - do { - telegram.transfer(datagram); - } while(datagram.get_wkc() < 0x0001); - } - - template - void read(address::Offset offset, DataTypes&... data) { - using TDatagram = datagram::EcatDatagram; - - datagram::EcatTelegram& telegram = ecat_buffer_.get_ecat().get_telegram(); - ecat_buffer::FMMUGlobalProperties& fmmu_global_properties = ecat_buffer_.get_fmmu_global_properties(); - address::Logical logical = fmmu_global_properties.logical_start_address + fmmu_global_properties.logical_full_length_write + offset; - TDatagram datagram{ {{logical}}, data... }; - - do { - telegram.transfer(datagram); - } while(datagram.get_wkc() < 0x0001); - } - - template - void read_write(address::Offset offset_read, address::Offset offset_write, DataTypes&... data) { - using TDatagramRead = datagram::EcatDatagram; - using TDatagramWrite = datagram::EcatDatagram; - - datagram::EcatTelegram& telegram = ecat_buffer_.get_ecat().get_telegram(); - ecat_buffer::FMMUGlobalProperties& fmmu_global_properties = ecat_buffer_.get_fmmu_global_properties(); - - address::Logical logical_read = fmmu_global_properties.logical_start_address + fmmu_global_properties.logical_full_length_write + offset_read; - TDatagramRead datagram_read{ {{logical_read}}, data... }; - - address::Logical logical_write = fmmu_global_properties.logical_start_address + offset_write; - TDatagramWrite datagram_write{ {{logical_write}}, data... 
}; - - datagram_read + datagram_write; - - do { - telegram.transfer(datagram_read); - } while(datagram_read.get_all_wkc() < 0x0001); - } +*/ }; diff --git a/components/free_rtos/ethernet_industry/CoE/eth_ecat_pdo_promise.hpp b/components/free_rtos/ethernet_industry/CoE/eth_ecat_pdo_promise.hpp deleted file mode 100644 index 96a46d0..0000000 --- a/components/free_rtos/ethernet_industry/CoE/eth_ecat_pdo_promise.hpp +++ /dev/null @@ -1,129 +0,0 @@ -/* - * eth_ecat_pdo_promise.hpp - * - * Created on: Jun 1, 2023 - * Author: algin - */ - -#ifndef FREE_RTOS_ETHERNET_INDUSTRY_COE_ETH_ECAT_PDO_PROMISE_HPP_ -#define FREE_RTOS_ETHERNET_INDUSTRY_COE_ETH_ECAT_PDO_PROMISE_HPP_ - -#include - -#include "semaphore/semaphore.hpp" - -#include "ethernet_industry/eth_ecat_command.hpp" -#include "ethernet_industry/eth_ecat_custom_tuple.hpp" - -namespace free_rtos { - -namespace pdo_promise { - -template -class PDOFuture { -public: - PDOFuture(DataTypes&... data) - : data_tuple_{data...} { } - - bool is_ready() { - return ready_; - } - - custom_tuple get() { - ready_ = false; - sem_.pend(); - - return data_tuple_; - } - - void pack(uint8_t* raw) { - PackFunctor functor{raw}; - - for_each(data_tuple_, functor); - - sem_.post(); - ready_ = true; - } - - void unpack(uint8_t* raw) { - UnpackFunctor functor{raw}; - - for_each(data_tuple_, functor); - - sem_.post(); - ready_ = true; - } - -private: - custom_tuple data_tuple_; - free_rtos::Semaphore sem_; - bool ready_{false}; -}; - -class IPDOPromise { -public: - IPDOPromise(address::Offset offset) - : offset_{offset} { } - - IPDOPromise* get_next() { - return next_; - } - - IPDOPromise& operator+(IPDOPromise &next) { - next_ = &next; - - return next; - } - - virtual void set_value(uint8_t* process_data, uint32_t len) = 0; - -protected: - address::Offset offset_; - -private: - IPDOPromise *next_{nullptr}; -}; - -template -class PDOWritePromise : public IPDOPromise { -public: - PDOWritePromise(address::Offset offset, DataTypes&... 
data) - : IPDOPromise{offset} - , future_{data...} { } - - PDOFuture& get_future() { - return future_; - } - - virtual void set_value(uint8_t* process_data, uint32_t len) override { - future_.pack(process_data + offset_); - } - -private: - PDOFuture future_; -}; - -template -class PDOReadPromise : public IPDOPromise { -public: - PDOReadPromise(address::Offset offset, DataTypes&... data) - : IPDOPromise{offset} - , future_{data...} { } - - PDOFuture& get_future() { - return future_; - } - - virtual void set_value(uint8_t* process_data, uint32_t len) override { - future_.unpack(process_data + offset_); - } - -private: - PDOFuture future_; -}; - -} // namespace pdo_promise - -} // namespace free_rtos - -#endif /* FREE_RTOS_ETHERNET_INDUSTRY_COE_ETH_ECAT_PDO_PROMISE_HPP_ */ diff --git a/components/free_rtos/ethernet_industry/CoE/eth_ecat_sdo_mailbox.cpp b/components/free_rtos/ethernet_industry/CoE/eth_ecat_sdo_mailbox.cpp index ddb52d5..4f65ead 100644 --- a/components/free_rtos/ethernet_industry/CoE/eth_ecat_sdo_mailbox.cpp +++ b/components/free_rtos/ethernet_industry/CoE/eth_ecat_sdo_mailbox.cpp @@ -23,7 +23,7 @@ void EthEcatSdoMailbox::init() { } void EthEcatSdoMailbox::read_pdo_map(uint16_t pdo_map_rx_index, uint16_t pdo_map_tx_index) { - datagram::EcatTelegram& telegram = ecat_buffer_.get_ecat().get_telegram(); + telegram::EcatTelegram& telegram = ecat_buffer_.get_ecat().get_telegram(); for(EcatSdoMailboxSlave& sdo_mailbox_slave : sdo_mailbox_slaves_) { uint16_t pdo_rx_data_size{0x0000}; diff --git a/components/free_rtos/ethernet_industry/CoE/eth_ecat_sdo_mailbox.hpp b/components/free_rtos/ethernet_industry/CoE/eth_ecat_sdo_mailbox.hpp index e4afe92..383360e 100644 --- a/components/free_rtos/ethernet_industry/CoE/eth_ecat_sdo_mailbox.hpp +++ b/components/free_rtos/ethernet_industry/CoE/eth_ecat_sdo_mailbox.hpp @@ -99,8 +99,10 @@ struct MailboxHeader { } // namespace ecat_sdo_mailbox // Специализация шаблона для распаковки протокола CoE +// наследоваться от 
each_tuple_element< FunctorT, custom_tuple > не обязательно +// т.к. используются только статические методы template -struct each_tuple_element> { +struct each_tuple_element< FunctorT, custom_tuple > { using TTuple = custom_tuple; using TBase = each_tuple_element; using TBaseBase = each_tuple_element; @@ -145,7 +147,7 @@ public: : buffer_slave_(mailbox_slave) { } template - void wait_available(datagram::EcatTelegram& telegram) { + void wait_available(telegram::EcatTelegram& telegram) { using TCommand = command::EcatCommand; auto slave_address = buffer_slave_.get_slave().get_slave_address(); @@ -159,7 +161,7 @@ public: } template - void wait_empty(datagram::EcatTelegram& telegram) { + void wait_empty(telegram::EcatTelegram& telegram) { using TCommand = command::EcatCommand; auto slave_address = buffer_slave_.get_slave().get_slave_address(); @@ -173,7 +175,7 @@ public: } template - void empty(datagram::EcatTelegram& telegram) { + void empty(telegram::EcatTelegram& telegram) { using TCommand = command::EcatCommand; auto slave_address = buffer_slave_.get_slave().get_slave_address(); @@ -195,7 +197,7 @@ public: } template - void send_data(datagram::EcatTelegram& telegram, uint16_t channel, uint16_t priority, ProtocolType type, DataTypes&... data) { + void send_data(telegram::EcatTelegram& telegram, uint16_t channel, uint16_t priority, ProtocolType type, DataTypes&... data) { using TCommand = command::EcatCommand; using TDatagram = datagram::EcatDatagram; @@ -221,7 +223,7 @@ public: } template - void receive_data(datagram::EcatTelegram& telegram, DataTypes&... data) { + void receive_data(telegram::EcatTelegram& telegram, DataTypes&... data) { using TCommand = command::EcatCommand; using TDatagram = datagram::EcatDatagram; @@ -244,13 +246,13 @@ public: } template - void receive(datagram::EcatTelegram& telegram, DataTypes&... data) { + void receive(telegram::EcatTelegram& telegram, DataTypes&... 
data) { wait_available(telegram); receive_data(telegram, data...); } template - void send(datagram::EcatTelegram& telegram, uint16_t channel, uint16_t priority, ProtocolType type, DataTypes&... data) { + void send(telegram::EcatTelegram& telegram, uint16_t channel, uint16_t priority, ProtocolType type, DataTypes&... data) { empty(telegram); wait_empty(telegram); send_data(telegram, channel, priority, type, data...); @@ -258,7 +260,7 @@ public: } template - CompleteSize sdo_write(datagram::EcatTelegram& telegram, uint16_t index, uint8_t subindex, DataTypes&... data) { + CompleteSize sdo_write(telegram::EcatTelegram& telegram, uint16_t index, uint8_t subindex, DataTypes&... data) { CoEElements elements{ .coe_header = { .number = 0x00, @@ -296,7 +298,7 @@ public: } template - CompleteSize sdo_read(datagram::EcatTelegram& telegram, uint16_t index, uint8_t subindex, DataTypes&... data) { + CompleteSize sdo_read(telegram::EcatTelegram& telegram, uint16_t index, uint8_t subindex, DataTypes&... data) { CoEElements elements{ .coe_header = { .number = 0x00, @@ -334,7 +336,7 @@ public: } template - uint16_t pdo_map_read(datagram::EcatTelegram& telegram, ecat_buffer::PDOMap& pdo_map, uint16_t pdo_map_index) { + uint16_t pdo_map_read(telegram::EcatTelegram& telegram, ecat_buffer::PDOMap& pdo_map, uint16_t pdo_map_index) { uint16_t pdo_data_size{0x0000}; // Размер данных в битах ! uint8_t pdo_block_count{0x00}; @@ -402,14 +404,14 @@ public: template CompleteSize sdo_write(size_t slave_index, uint16_t index, uint8_t subindex, DataTypes&... data) { - datagram::EcatTelegram& telegram = ecat_buffer_.get_ecat().get_telegram(); + telegram::EcatTelegram& telegram = ecat_buffer_.get_ecat().get_telegram(); return sdo_mailbox_slaves_[slave_index].sdo_write(telegram, index, subindex, data...); } template CompleteSize sdo_read(size_t slave_index, uint16_t index, uint8_t subindex, DataTypes&... 
data) { - datagram::EcatTelegram& telegram = ecat_buffer_.get_ecat().get_telegram(); + telegram::EcatTelegram& telegram = ecat_buffer_.get_ecat().get_telegram(); return sdo_mailbox_slaves_[slave_index].sdo_read(telegram, index, subindex, data...); } diff --git a/components/free_rtos/ethernet_industry/eth_ecat.cpp b/components/free_rtos/ethernet_industry/eth_ecat.cpp index 9e65a7b..6ae797b 100644 --- a/components/free_rtos/ethernet_industry/eth_ecat.cpp +++ b/components/free_rtos/ethernet_industry/eth_ecat.cpp @@ -21,6 +21,18 @@ EthEcat::EthEcat(Eth& eth) void EthEcat::Init(TEthMacPorts port_id) { port_id_ = port_id; telegram_.init(port_id); + + Timer::Settings ecat_tmr_sett = { + .input_clk_Hz = 25000000, // 25MHz + .base_address = 0x2400000u, // memory mapping, + .clock_src_mux_addr = 0x430081B0u, // sysconfig + .int_num = 152u, // sysconfig + .int_priority = 4, // sysconfig + .period_us = 400 ///400 microsec + }; + + ecat_timer_.Init(ecat_tmr_sett); + ecat_timer_.Stop(); } void EthEcat::set_slaves_to_default() { diff --git a/components/free_rtos/ethernet_industry/eth_ecat.hpp b/components/free_rtos/ethernet_industry/eth_ecat.hpp index 5d582cb..230aff1 100644 --- a/components/free_rtos/ethernet_industry/eth_ecat.hpp +++ b/components/free_rtos/ethernet_industry/eth_ecat.hpp @@ -14,13 +14,16 @@ #include "handler_store/handler.hpp" #include "ethernet/eth_frame.h" +#include "timer/timer.hpp" #include "mutex/mutex.hpp" #include "semaphore/semaphore.hpp" + #include "ethernet/eth.hpp" #include "ethernet_industry/ethercattype.hpp" #include "ethernet_industry/eth_ecat_types.h" #include "ethernet_industry/eth_ecat_command.hpp" #include "ethernet_industry/eth_ecat_datagram.hpp" +#include "ethernet_industry/eth_ecat_telegram.hpp" #include "ethernet_industry/eth_ecat_eeprom.hpp" namespace free_rtos { @@ -50,7 +53,7 @@ public: } template - void enable_PDI(datagram::EcatTelegram& telegram) { + void enable_PDI(telegram::EcatTelegram& telegram) { using TCommand = 
command::EcatCommand; auto slave_address = get_slave_address(); uint8_t data{0x01}; @@ -62,7 +65,7 @@ public: } template - bool init_to_preop(datagram::EcatTelegram& telegram) { + bool init_to_preop(telegram::EcatTelegram& telegram) { auto slave_address = get_slave_address(); ALSTAT stat{0x0000, 0x0000}; @@ -104,7 +107,7 @@ public: } template - bool preop_to_safeop(datagram::EcatTelegram& telegram) { + bool preop_to_safeop(telegram::EcatTelegram& telegram) { auto slave_address = get_slave_address(); ALSTAT stat{0x0000, 0x0000}; uint32_t zero{0x00000000}; @@ -149,7 +152,7 @@ public: } template - bool safeop_to_op(datagram::EcatTelegram& telegram) { + bool safeop_to_op(telegram::EcatTelegram& telegram) { auto slave_address = get_slave_address(); ALSTAT stat{0x0000, 0x0000}; uint16_t zero{0x00000000}; @@ -233,7 +236,7 @@ public: telegram_.transfer(datagram); } - datagram::EcatTelegram& get_telegram() { + telegram::EcatTelegram& get_telegram() { return telegram_; } @@ -245,6 +248,10 @@ public: return slaves_; } + free_rtos::Timer& get_ecat_timer() { + return ecat_timer_; + } + free_rtos::Semaphore& get_init_sem() { return init_sem_; } @@ -266,6 +273,8 @@ private: //Mutex mut_; + free_rtos::Timer ecat_timer_; + free_rtos::Semaphore rx_sem_; free_rtos::Semaphore init_sem_; @@ -274,7 +283,7 @@ private: Eth& eth_; EthTxFlowIface& tx_flow_; - datagram::EcatTelegram telegram_; + telegram::EcatTelegram telegram_; eeprom::EEPROM eeprom_; TEthMacPorts port_id_; /// ���� ���� ����� ���������� ������ diff --git a/components/free_rtos/ethernet_industry/eth_ecat_api.hpp b/components/free_rtos/ethernet_industry/eth_ecat_api.hpp index 4e487fe..db9ce11 100644 --- a/components/free_rtos/ethernet_industry/eth_ecat_api.hpp +++ b/components/free_rtos/ethernet_industry/eth_ecat_api.hpp @@ -45,11 +45,11 @@ public: get_ecat_pdo_fmmu().pdo_read(offset, data...); } - static void pdo_write_async(pdo_promise::IPDOPromise& promise) { + static void pdo_write_async(custom_promise::IPromise& 
promise) { get_ecat_pdo_fmmu().pdo_write_async(promise); } - static void pdo_read_async(pdo_promise::IPDOPromise& promise) { + static void pdo_read_async(custom_promise::IPromise& promise) { get_ecat_pdo_fmmu().pdo_read_async(promise); } diff --git a/components/free_rtos/ethernet_industry/eth_ecat_buffer.cpp b/components/free_rtos/ethernet_industry/eth_ecat_buffer.cpp index a04f63b..134c086 100644 --- a/components/free_rtos/ethernet_industry/eth_ecat_buffer.cpp +++ b/components/free_rtos/ethernet_industry/eth_ecat_buffer.cpp @@ -30,7 +30,7 @@ void EthEcatBuffer::init(uint16_t rx_eeprom_addr, uint16_t tx_eeprom_addr) { } void EthEcatBuffer::init_sync_manager(sync_manager sm_write, sync_manager sm_read) { - datagram::EcatTelegram& telegram = ecat_.get_telegram(); + telegram::EcatTelegram& telegram = ecat_.get_telegram(); for(EcatBufferSlave& buffer_slave : buffer_slaves_) { buffer_slave.init_sync_manager(telegram, sm_write, sm_read); @@ -38,7 +38,7 @@ void EthEcatBuffer::init_sync_manager(sync_manager sm_write, sync_manager sm_rea } void EthEcatBuffer::init_fmmu(fmmu fmmu_write, fmmu fmmu_read) { - datagram::EcatTelegram& telegram = ecat_.get_telegram(); + telegram::EcatTelegram& telegram = ecat_.get_telegram(); for(EcatBufferSlave& buffer_slave : buffer_slaves_) { buffer_slave.init_fmmu_write(telegram, fmmu_write, fmmu_global_properties_); diff --git a/components/free_rtos/ethernet_industry/eth_ecat_buffer.hpp b/components/free_rtos/ethernet_industry/eth_ecat_buffer.hpp index e1010f7..51f9348 100644 --- a/components/free_rtos/ethernet_industry/eth_ecat_buffer.hpp +++ b/components/free_rtos/ethernet_industry/eth_ecat_buffer.hpp @@ -172,7 +172,7 @@ public: } template - void init_sync_manager(datagram::EcatTelegram& telegram, sync_manager sm_write, sync_manager sm_read) { + void init_sync_manager(telegram::EcatTelegram& telegram, sync_manager sm_write, sync_manager sm_read) { SyncManager sync_manager_write = sync_managers_[static_cast(sm_write)]; auto datagram_write = 
make_sync_manager_datagram(sync_manager_write, buffer_properties_write_); @@ -212,7 +212,7 @@ public: } template - void init_fmmu_write(datagram::EcatTelegram& telegram, fmmu fmmu, FMMUGlobalProperties& fmmu_global_properties) { + void init_fmmu_write(telegram::EcatTelegram& telegram, fmmu fmmu, FMMUGlobalProperties& fmmu_global_properties) { fmmu_write_ = fmmu; FMMUSettings settings { @@ -240,7 +240,7 @@ public: } template - void init_fmmu_read(datagram::EcatTelegram& telegram, fmmu fmmu, FMMUGlobalProperties& fmmu_global_properties) { + void init_fmmu_read(telegram::EcatTelegram& telegram, fmmu fmmu, FMMUGlobalProperties& fmmu_global_properties) { fmmu_read_ = fmmu; FMMUSettings settings { @@ -268,7 +268,7 @@ public: } template - void init_fmmu(datagram::EcatTelegram& telegram, fmmu fmmu_write, fmmu fmmu_read, FMMUGlobalProperties& fmmu_global_properties) { + void init_fmmu(telegram::EcatTelegram& telegram, fmmu fmmu_write, fmmu fmmu_read, FMMUGlobalProperties& fmmu_global_properties) { fmmu_write_ = fmmu_write; fmmu_read_ = fmmu_read; diff --git a/components/free_rtos/ethernet_industry/eth_ecat_command.hpp b/components/free_rtos/ethernet_industry/eth_ecat_command.hpp index 42eb17b..d15d872 100644 --- a/components/free_rtos/ethernet_industry/eth_ecat_command.hpp +++ b/components/free_rtos/ethernet_industry/eth_ecat_command.hpp @@ -15,6 +15,7 @@ #include "ethernet_industry/ethercattype.hpp" #include "ethernet_industry/eth_ecat_types.h" #include "ethernet_industry/eth_ecat_custom_tuple.hpp" +#include "ethernet_industry/eth_ecat_packer.hpp" namespace free_rtos { diff --git a/components/free_rtos/ethernet_industry/eth_ecat_custom_promise.hpp b/components/free_rtos/ethernet_industry/eth_ecat_custom_promise.hpp new file mode 100644 index 0000000..da97d38 --- /dev/null +++ b/components/free_rtos/ethernet_industry/eth_ecat_custom_promise.hpp @@ -0,0 +1,196 @@ +/* + * eth_ecat_custom_promise.hpp + * + * Created on: Jun 1, 2023 + * Author: algin + */ + +#ifndef 
FREE_RTOS_ETHERNET_INDUSTRY_COE_ETH_ECAT_CUSTOM_PROMISE_HPP_ +#define FREE_RTOS_ETHERNET_INDUSTRY_COE_ETH_ECAT_CUSTOM_PROMISE_HPP_ + +#include + +#include "semaphore/semaphore.hpp" + +#include "ethernet_industry/eth_ecat_queue.hpp" +#include "ethernet_industry/eth_ecat_datagram.hpp" +#include "ethernet_industry/eth_ecat_command.hpp" +#include "ethernet_industry/eth_ecat_custom_tuple.hpp" +#include "ethernet_industry/eth_ecat_packer.hpp" + +namespace free_rtos { + +namespace custom_promise { + +/* +// Функтор для обхода и упаковки датаграмм в custom_tuple +struct DatagramPackFunctor : public PackFunctor { + DatagramPackFunctor(uint8_t *raw) + : PackFunctor{raw} { } + + using PackFunctor::operator (); + + template + void operator()(datagram::EcatDatagram& data) { + raw = data.pack(raw); + } + + template + void operator()(std::vector< datagram::EcatDatagram >& data) { + for(uint16_t i = 1; i < data.size(); i++) { + data[i - 1] + data[i]; + + raw = data[i - 1].pack(raw); + } + + raw = data[data.size() - 1].pack(raw); + } +}; +*/ + +// Функтор для обхода и распаковки датаграмм в custom_tuple +struct DatagramUnpackFunctor : public UnpackFunctor { + DatagramUnpackFunctor(uint8_t *raw) + : UnpackFunctor{raw} { } + + using UnpackFunctor::operator (); + + template + void operator()(datagram::EcatDatagram& data) { + raw = data.unpack(raw); + } + + template + void operator()(std::vector< datagram::EcatDatagram >& data) { + for(uint16_t i = 1; i < data.size(); i++) { + data[i - 1] + data[i]; + + raw = data[i - 1].unpack(raw); + } + + raw = data[data.size() - 1].unpack(raw); + } +}; + +template +class Future { +public: + Future(DataTypes&... 
data) + : data_tuple_{data...} { } + + bool is_ready() { + return ready_; + } + + custom_tuple get() { + ready_ = false; + sem_.pend(); + + return data_tuple_; + } + + void pack(uint8_t* raw) { + PackFunctor functor{raw}; + + for_each(data_tuple_, functor); + + sem_.post(); + ready_ = true; + } + + void unpack(uint8_t* raw) { + UnpackFunctor functor{raw}; + + for_each(data_tuple_, functor); + + sem_.post(); + ready_ = true; + } + +private: + custom_tuple data_tuple_; + free_rtos::Semaphore sem_; + bool ready_{false}; +}; + +class IPromise { +public: + IPromise(address::Offset offset = 0) + : offset_{offset} { } + + IPromise* get_next() { + queue::QueueEntity* next = queue_entity_.get_next(); + + if(next == nullptr) { + return nullptr; + } + + return next->get_data(); + } + + queue::QueueEntity& get_queue_entity() { + return queue_entity_; + } + + IPromise& operator+(IPromise &next) { + queue_entity_ + next.get_queue_entity(); + + return next; + } + + void detach() { + queue_entity_.detach(); + } + + virtual void set_value(uint8_t* process_data, uint32_t len) = 0; + +protected: + address::Offset offset_{0}; + +private: + queue::QueueEntity queue_entity_{this}; +}; + +template +class WritePromise : public IPromise { +public: + WritePromise(address::Offset offset, DataTypes&... data) + : IPromise{offset} + , future_{data...} { } + + Future& get_future() { + return future_; + } + + virtual void set_value(uint8_t* process_data, uint32_t len) override { + future_.pack(process_data + offset_); + } + +private: + Future future_; +}; + +template +class ReadPromise : public IPromise { +public: + ReadPromise(address::Offset offset, DataTypes&... 
data) + : IPromise{offset} + , future_{data...} { } + + Future& get_future() { + return future_; + } + + virtual void set_value(uint8_t* process_data, uint32_t len) override { + future_.unpack(process_data + offset_); + } + +private: + Future future_; +}; + +} // namespace custom_promise + +} // namespace free_rtos + +#endif /* FREE_RTOS_ETHERNET_INDUSTRY_COE_ETH_ECAT_CUSTOM_PROMISE_HPP_ */ diff --git a/components/free_rtos/ethernet_industry/eth_ecat_custom_tuple.hpp b/components/free_rtos/ethernet_industry/eth_ecat_custom_tuple.hpp index 79196fc..060fd58 100644 --- a/components/free_rtos/ethernet_industry/eth_ecat_custom_tuple.hpp +++ b/components/free_rtos/ethernet_industry/eth_ecat_custom_tuple.hpp @@ -46,8 +46,11 @@ struct custom_tuple<> { template struct custom_tuple_element; +// Основная специализация шаблона +// Наследоваться от custom_tuple_element< index - 1, custom_tuple > не обязательно +// т.к. используются только статические методы template -struct custom_tuple_element> { +struct custom_tuple_element< index, custom_tuple > { using TTuple = custom_tuple; using TBase = custom_tuple_element; using type = typename TBase::type; @@ -80,8 +83,10 @@ template struct each_tuple_element; // Основная специализация шаблона. Есть еще одна в протоколе CoE SDO ! +// Наследоваться от each_tuple_element< FunctorT, custom_tuple > не обязательно +// т.к. 
используются только статические методы template -struct each_tuple_element> { +struct each_tuple_element< FunctorT, custom_tuple > { using TTuple = custom_tuple; using TBase = each_tuple_element; @@ -98,7 +103,7 @@ struct each_tuple_element> { // Специализация завершения рекурсии template -struct each_tuple_element> { +struct each_tuple_element< FunctorT, custom_tuple<> > { using TTuple = custom_tuple<>; static void for_each(FunctorT& functor, TTuple& t) { } @@ -119,69 +124,6 @@ void for_each_reverse(TupleT& t, FunctorT& functor) { functor(); } - -struct Padding { - size_t size; -}; - -// Функтор для обхода и упаковки элементов custom_tuple -struct PackFunctor { - uint8_t *raw; - - template - void operator()(DataT& data) { - DataT *data_p = new(raw) DataT{data}; - - (void)data_p; - - raw += sizeof(DataT); - } - - template - void operator()(std::vector& data) { - size_t size = data.size() * sizeof(DataT); - - memcpy(raw, data.data(), size); - - raw += size; - } - - void operator()(Padding& padding) { - raw += padding.size; - } - - void operator()() { } -}; - -// Функтор для обхода и распаковки элементов custom_tuple -struct UnpackFunctor { - uint8_t *raw; - - template - void operator()(DataT& data) { - DataT *p_data = reinterpret_cast(raw); - - data = *p_data; - - raw += sizeof(DataT); - } - - template - void operator()(std::vector& data) { - size_t size = data.size() * sizeof(DataT); - - memcpy(data.data(), raw, size); - - raw += size; - } - - void operator()(Padding& padding) { - raw += padding.size; - } - - void operator()() { } -}; - } #endif /* FREE_RTOS_ETHERNET_INDUSTRY_ETH_ECAT_CUSTOM_TUPLE_HPP_ */ diff --git a/components/free_rtos/ethernet_industry/eth_ecat_datagram.hpp b/components/free_rtos/ethernet_industry/eth_ecat_datagram.hpp index de9953a..d7eeb7d 100644 --- a/components/free_rtos/ethernet_industry/eth_ecat_datagram.hpp +++ b/components/free_rtos/ethernet_industry/eth_ecat_datagram.hpp @@ -10,10 +10,11 @@ #include -#include "ethernet/eth.hpp" 
#include "ethernet_industry/ethercattype.hpp" #include "ethernet_industry/eth_ecat_types.h" +#include "ethernet_industry/eth_ecat_queue.hpp" #include "ethernet_industry/eth_ecat_custom_tuple.hpp" +#include "ethernet_industry/eth_ecat_packer.hpp" #include "ethernet_industry/eth_ecat_command.hpp" namespace free_rtos { @@ -33,21 +34,28 @@ public: virtual ~IEcatDatagram() { }; + IEcatDatagram* get_next() { + queue::QueueEntity* next = queue_entity_.get_next(); + + if(next == nullptr) { + return nullptr; + } + + return next->get_data(); + } + + queue::QueueEntity& get_queue_entity() { + return queue_entity_; + } + IEcatDatagram& operator+(IEcatDatagram &next) { more_ = ec_moredatagrams::EC_MOREDATAGRAMS_MORE; - next_ = &next; + + queue_entity_ + next.get_queue_entity(); return next; } - IEcatDatagram* get_next() { - return next_; - } - - IEcatDatagram& set_next(IEcatDatagram &next) { - return operator+(next); - } - virtual uint8_t* pack(uint8_t *raw) = 0; virtual uint8_t* unpack(uint8_t *raw) = 0; @@ -60,11 +68,13 @@ public: } TEcatWkc get_all_wkc() { - if(next_ != nullptr) { - return wkc_ + next_->get_all_wkc(); - } else { + queue::QueueEntity* next = queue_entity_.get_next(); + + if(next == nullptr) { return wkc_; } + + return wkc_ + next->get_data()->get_all_wkc(); } protected: @@ -73,7 +83,7 @@ protected: TEcatWkc wkc_; private: - IEcatDatagram *next_{nullptr}; + queue::QueueEntity queue_entity_{this}; }; template @@ -162,37 +172,6 @@ private: } }; -class EcatTelegram : public Handler { -public: - EcatTelegram(Eth& eth) - : eth_{eth} - , tx_flow_{*eth.getTxFlowPtr()} { - eth_.getEthStackPtr()->Register(ETH_PROT_ECAT_LE, this); - } - - virtual int32_t Process(uint8_t *p_data, uint32_t len) override; - - void init(TEthMacPorts port_id) { - port_id_ = port_id; - } - - void transfer(IEcatDatagram& first); - -private: - Eth& eth_; - EthTxFlowIface& tx_flow_; - TEthMacPorts port_id_; - - free_rtos::Semaphore rx_sem_; - - IEcatDatagram *datagram_queue_{nullptr}; - - 
TEthPkt buffer_out_; - - void pack(); - void unpack(uint8_t *raw); -}; - } diff --git a/components/free_rtos/ethernet_industry/eth_ecat_eeprom.hpp b/components/free_rtos/ethernet_industry/eth_ecat_eeprom.hpp index f32e78f..5e7a10a 100644 --- a/components/free_rtos/ethernet_industry/eth_ecat_eeprom.hpp +++ b/components/free_rtos/ethernet_industry/eth_ecat_eeprom.hpp @@ -11,6 +11,7 @@ #include #include +#include "ethernet_industry/eth_ecat_telegram.hpp" namespace free_rtos { @@ -18,7 +19,7 @@ namespace eeprom { class EEPROM { public: - EEPROM(datagram::EcatTelegram& telegram) + EEPROM(telegram::EcatTelegram& telegram) : telegram_{telegram} { } template @@ -73,7 +74,7 @@ public: } private: - datagram::EcatTelegram& telegram_; + telegram::EcatTelegram& telegram_; }; } diff --git a/components/free_rtos/ethernet_industry/eth_ecat_packer.hpp b/components/free_rtos/ethernet_industry/eth_ecat_packer.hpp new file mode 100644 index 0000000..23a7cf7 --- /dev/null +++ b/components/free_rtos/ethernet_industry/eth_ecat_packer.hpp @@ -0,0 +1,93 @@ +/* + * eth_ecat_packer.hpp + * + * Created on: Jun 5, 2023 + * Author: algin + */ + +#ifndef FREE_RTOS_ETHERNET_INDUSTRY_ETH_ECAT_PACKER_HPP_ +#define FREE_RTOS_ETHERNET_INDUSTRY_ETH_ECAT_PACKER_HPP_ + +#include + +namespace free_rtos { + +struct Padding { + size_t size; +}; + +struct PackFunctorBase { + uint8_t *raw; + + template + void operator()(DataT& data) { + DataT *data_p = new(raw) DataT{data}; + + (void)data_p; + + raw += sizeof(DataT); + } + + void operator()() { } +}; + +// Функтор для обхода и упаковки элементов custom_tuple +struct PackFunctor : public PackFunctorBase { + PackFunctor(uint8_t *raw) + : PackFunctorBase{raw} { } + + using PackFunctorBase::operator (); + + template + void operator()(std::vector& data) { + size_t size = data.size() * sizeof(DataT); + + memcpy(raw, data.data(), size); + + raw += size; + } + + void operator()(Padding& padding) { + raw += padding.size; + } +}; + +struct UnpackFunctorBase { + 
uint8_t *raw; + + template + void operator()(DataT& data) { + DataT *p_data = reinterpret_cast(raw); + + data = *p_data; + + raw += sizeof(DataT); + } + + void operator()() { } +}; + +// Функтор для обхода и распаковки элементов custom_tuple +struct UnpackFunctor : public UnpackFunctorBase { + UnpackFunctor(uint8_t *raw) + : UnpackFunctorBase{raw} { } + + using UnpackFunctorBase::operator (); + + template + void operator()(std::vector& data) { + size_t size = data.size() * sizeof(DataT); + + memcpy(data.data(), raw, size); + + raw += size; + } + + void operator()(Padding& padding) { + raw += padding.size; + } +}; + +} + +#endif /* FREE_RTOS_ETHERNET_INDUSTRY_ETH_ECAT_PACKER_HPP_ */ diff --git a/components/free_rtos/ethernet_industry/eth_ecat_queue.hpp b/components/free_rtos/ethernet_industry/eth_ecat_queue.hpp new file mode 100644 index 0000000..48fb531 --- /dev/null +++ b/components/free_rtos/ethernet_industry/eth_ecat_queue.hpp @@ -0,0 +1,90 @@ +/* + * eth_ecat_queue.hpp + * + * Created on: Jun 2, 2023 + * Author: algin + */ + +#ifndef FREE_RTOS_ETHERNET_INDUSTRY_ETH_ECAT_QUEUE_HPP_ +#define FREE_RTOS_ETHERNET_INDUSTRY_ETH_ECAT_QUEUE_HPP_ + +#include + +namespace free_rtos { + +namespace queue { + +template +class QueueEntity { +public: + QueueEntity(DataType *data) + : data_{data} { } + + DataType* get_data() { + return data_; + } + + QueueEntity* get_next() { + return next_; + } + + void detach() { + next_ = nullptr; + first_ = this; + last_ = this; + } + + QueueEntity& operator+(QueueEntity& next) { + attach(next); + + //set_next(next); + + return next; + } + + QueueEntity* operator+(QueueEntity *next) { + attach(next); + + //set_next(next); + + return next; + } + +private: + DataType *data_{nullptr}; + + QueueEntity *next_{nullptr}; + QueueEntity *first_{this}; + QueueEntity *last_{this}; + + void set_next(QueueEntity &next) { + next_ = &next; + } + + QueueEntity* get_last() { + return last_; + } + + void set_first(QueueEntity* first) { + first_ = first; + } 
+ + QueueEntity* attach(QueueEntity& next) { + if(this != first_) { + first_ = first_->attach(next); + }else{ + next.set_first(first_); + last_->set_next(next); + //last_ = next.get_last(); + last_ = &next; + } + + return first_; + } +}; + +} + +} + +#endif /* FREE_RTOS_ETHERNET_INDUSTRY_ETH_ECAT_QUEUE_HPP_ */ diff --git a/components/free_rtos/ethernet_industry/eth_ecat_datagram.cpp b/components/free_rtos/ethernet_industry/eth_ecat_telegram.cpp similarity index 77% rename from components/free_rtos/ethernet_industry/eth_ecat_datagram.cpp rename to components/free_rtos/ethernet_industry/eth_ecat_telegram.cpp index e1b49d1..d9a3881 100644 --- a/components/free_rtos/ethernet_industry/eth_ecat_datagram.cpp +++ b/components/free_rtos/ethernet_industry/eth_ecat_telegram.cpp @@ -1,15 +1,15 @@ /* - * eth_ecat_datagram.cpp + * eth_ecat_telegram.cpp * - * Created on: May 2, 2023 + * Created on: Jun 5, 2023 * Author: algin */ -#include "ethernet_industry/eth_ecat_datagram.hpp" +#include "ethernet_industry/eth_ecat_telegram.hpp" namespace free_rtos { -namespace datagram { +namespace telegram { int32_t EcatTelegram::Process(uint8_t *p_data, uint32_t len) { //buffer_in_.length = len + sizeof(TEthFrameHeader); @@ -38,13 +38,13 @@ void EcatTelegram::pack() { .type = static_cast(ec_network::PROTOCOL_TYPE)}}; uint8_t *p_datagram_first = buffer_out_.data + sizeof(TEthFrameHeader) + sizeof(TEcatFrameHeader); uint8_t *p_datagram_last = p_datagram_first; - IEcatDatagram *next = datagram_queue_; + queue::QueueEntity *next = datagram_queue_; (void)p_eth_hdr; (void)p_hdr; while(next != nullptr) { - p_datagram_last = next->pack(p_datagram_last); + p_datagram_last = next->get_data()->pack(p_datagram_last); next = next->get_next(); } @@ -57,19 +57,19 @@ void EcatTelegram::unpack(uint8_t *raw) { TEcatFrameHeader *p_hdr = reinterpret_cast(raw + sizeof(TEthFrameHeader)); uint8_t *p_datagram_first = raw + sizeof(TEthFrameHeader) + sizeof(TEcatFrameHeader); uint8_t *p_datagram_last = 
p_datagram_first; - IEcatDatagram *next = datagram_queue_; + queue::QueueEntity *next = datagram_queue_; (void)p_eth_hdr; (void)p_hdr; while(next != nullptr) { - p_datagram_last = next->unpack(p_datagram_last); + p_datagram_last = next->get_data()->unpack(p_datagram_last); next = next->get_next(); } } -void EcatTelegram::transfer(IEcatDatagram& first) { - datagram_queue_ = &first; // TODO: Доделать добавление в очередь более одного элемента +void EcatTelegram::transfer(datagram::IEcatDatagram& first) { + datagram_queue_ = &first.get_queue_entity(); // TODO: Доделать добавление в очередь более одного элемента pack(); bool stat = tx_flow_.send(port_id_, buffer_out_.data, buffer_out_.length); diff --git a/components/free_rtos/ethernet_industry/eth_ecat_telegram.hpp b/components/free_rtos/ethernet_industry/eth_ecat_telegram.hpp new file mode 100644 index 0000000..01f6f3f --- /dev/null +++ b/components/free_rtos/ethernet_industry/eth_ecat_telegram.hpp @@ -0,0 +1,55 @@ +/* + * eth_ecat_telegram.hpp + * + * Created on: Jun 5, 2023 + * Author: algin + */ + +#ifndef FREE_RTOS_ETHERNET_INDUSTRY_ETH_ECAT_TELEGRAM_HPP_ +#define FREE_RTOS_ETHERNET_INDUSTRY_ETH_ECAT_TELEGRAM_HPP_ + +#include "ethernet/eth.hpp" + +#include "ethernet_industry/eth_ecat_datagram.hpp" + +namespace free_rtos { + +namespace telegram { + +class EcatTelegram : public Handler { +public: + EcatTelegram(Eth& eth) + : eth_{eth} + , tx_flow_{*eth.getTxFlowPtr()} { + eth_.getEthStackPtr()->Register(ETH_PROT_ECAT_LE, this); + } + + virtual int32_t Process(uint8_t *p_data, uint32_t len) override; + + void init(TEthMacPorts port_id) { + port_id_ = port_id; + } + + void transfer(datagram::IEcatDatagram& first); + +private: + Eth& eth_; + EthTxFlowIface& tx_flow_; + TEthMacPorts port_id_; + + free_rtos::Semaphore rx_sem_; + + queue::QueueEntity *datagram_queue_{nullptr}; + + TEthPkt buffer_out_; + + void pack(); + void unpack(uint8_t *raw); +}; + +} + + +} + +#endif /* 
FREE_RTOS_ETHERNET_INDUSTRY_ETH_ECAT_TELEGRAM_HPP_ */