apply cpp formatting
Signed-off-by: Tim Paine <[email protected]>
timkpaine committed Jul 13, 2024
1 parent cca3466 commit b19ab5e
Showing 319 changed files with 15,459 additions and 14,626 deletions.
4 changes: 2 additions & 2 deletions Makefile
@@ -40,7 +40,7 @@ lint-py:
python -m ruff format --check csp/ examples/ setup.py

lint-cpp:
clang-format --dry-run -Werror -i -style=file `find ./cpp/ -type f -name "*.cpp" -o -name "*.h"`
clang-format --dry-run -Werror -i -style=file `find ./cpp/csp -type f -name "*.cpp" -o -name "*.h"`

lint-docs:
python -m mdformat --check docs/wiki/ README.md examples/
@@ -57,7 +57,7 @@ fix-py:
python -m ruff format csp/ examples/ setup.py

fix-cpp:
clang-format -i -style=file `find ./cpp/ -type f -name "*.cpp" -o -name "*.h"`
clang-format -i -style=file `find ./cpp/csp -type f -name "*.cpp" -o -name "*.h"`

fix-docs:
python -m mdformat docs/wiki/ README.md examples/
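
Note on the two C++ targets above: lint-cpp runs clang-format with --dry-run -Werror, so it fails on any formatting violation without modifying files, while fix-cpp rewrites the sources in place; both now limit the find to ./cpp/csp rather than all of ./cpp/. The rules themselves come from the repository's .clang-format file (that is what -style=file means), which is not part of this diff. Below is a minimal, hypothetical sketch of the kind of rewrite the rest of this commit applies; the Engine and Widget types are invented for illustration and do not come from the repository.

struct Engine
{
    void start() {}
};

class Widget
{
public:
    // Before this commit, a constructor like this was typically written on one line, e.g.
    //   Widget( Engine * engine ) : m_engine( engine ), m_count( 0 ) { m_engine -> start(); }
    // The reformatting breaks the initializer list one entry per line with leading commas
    // and removes the spaces around "->", as in the hunks below.
    Widget( Engine * engine )
        : m_engine( engine )
        , m_count( 0 )
    {
        m_engine->start();
    }

private:
    Engine * m_engine;
    int m_count;
};

After make fix-cpp has rewritten the tree, make lint-cpp should pass, since both targets read the same -style=file configuration.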
140 changes: 74 additions & 66 deletions cpp/csp/adapters/kafka/KafkaAdapterManager.cpp
@@ -2,21 +2,17 @@
#include <csp/adapters/kafka/KafkaConsumer.h>
#include <csp/adapters/kafka/KafkaPublisher.h>
#include <csp/adapters/kafka/KafkaSubscriber.h>
#include <csp/engine/Dictionary.h>
#include <csp/core/Platform.h>
#include <csp/engine/Dictionary.h>

#include <iostream>
#include <librdkafka/rdkafkacpp.h>

namespace csp
{

INIT_CSP_ENUM( csp::adapters::kafka::KafkaStatusMessageType,
"OK",
"MSG_DELIVERY_FAILED",
"MSG_SEND_ERROR",
"MSG_RECV_ERROR"
);
INIT_CSP_ENUM( csp::adapters::kafka::KafkaStatusMessageType, "OK", "MSG_DELIVERY_FAILED", "MSG_SEND_ERROR",
"MSG_RECV_ERROR" );

}

@@ -26,59 +22,68 @@ namespace csp::adapters::kafka
class DeliveryReportCb : public RdKafka::DeliveryReportCb
{
public:
DeliveryReportCb( KafkaAdapterManager * mgr ) : m_adapterManager( mgr )
DeliveryReportCb( KafkaAdapterManager * mgr )
: m_adapterManager( mgr )
{
}

void dr_cb( RdKafka::Message &message ) final
void dr_cb( RdKafka::Message & message ) final
{
/* If message.err() is non-zero the message delivery failed permanently
* for the message. */
if( message.err() )
{
std::string msg = "KafkaPublisher: Message delivery failed for topic " + message.topic_name() + ". Failure: " + message.errstr();
m_adapterManager -> pushStatus( StatusLevel::ERROR, KafkaStatusMessageType::MSG_DELIVERY_FAILED, msg );
std::string msg = "KafkaPublisher: Message delivery failed for topic " + message.topic_name()
+ ". Failure: " + message.errstr();
m_adapterManager->pushStatus( StatusLevel::ERROR, KafkaStatusMessageType::MSG_DELIVERY_FAILED, msg );
}
}

private:
KafkaAdapterManager * m_adapterManager;
};

class EventCb : public RdKafka::EventCb
{
public:
EventCb( KafkaAdapterManager * mgr ) : m_adapterManager( mgr ) {}
EventCb( KafkaAdapterManager * mgr )
: m_adapterManager( mgr )
{
}

void event_cb( RdKafka::Event & event ) override
{
if( event.type() == RdKafka::Event::EVENT_LOG )
{
if( event.severity() < RdKafka::Event::EVENT_SEVERITY_NOTICE )
{
std::string errmsg = "KafkaConsumer: error " + RdKafka::err2str( ( RdKafka::ErrorCode ) event.err() ) + ". Reason: " + event.str();
m_adapterManager -> pushStatus( StatusLevel::ERROR, KafkaStatusMessageType::GENERIC_ERROR, errmsg );
std::string errmsg = "KafkaConsumer: error " + RdKafka::err2str( (RdKafka::ErrorCode)event.err() )
+ ". Reason: " + event.str();
m_adapterManager->pushStatus( StatusLevel::ERROR, KafkaStatusMessageType::GENERIC_ERROR, errmsg );
}
}
else if( event.type() == RdKafka::Event::EVENT_ERROR )
{
//We shutdown the app if its a fatal error OR if its an authentication issue which has plagued users multiple times
// We shutdown the app if its a fatal error OR if its an authentication issue which has plagued users
// multiple times
if( event.fatal() || event.err() == RdKafka::ErrorCode::ERR__AUTHENTICATION )
m_adapterManager -> forceShutdown( RdKafka::err2str( ( RdKafka::ErrorCode ) event.err() ) + event.str() );
m_adapterManager->forceShutdown( RdKafka::err2str( (RdKafka::ErrorCode)event.err() ) + event.str() );
}
}

private:
KafkaAdapterManager * m_adapterManager;
};

KafkaAdapterManager::KafkaAdapterManager( csp::Engine * engine, const Dictionary & properties ) : AdapterManager( engine ),
m_consumerIdx( 0 ),
m_producerPollThreadActive( false )
KafkaAdapterManager::KafkaAdapterManager( csp::Engine * engine, const Dictionary & properties )
: AdapterManager( engine )
, m_consumerIdx( 0 )
, m_producerPollThreadActive( false )
{
m_maxThreads = properties.get<uint64_t>( "max_threads" );
m_maxThreads = properties.get<uint64_t>( "max_threads" );
m_pollTimeoutMs = properties.get<TimeDelta>( "poll_timeout" ).asMilliseconds();

m_eventCb = std::make_unique<EventCb>( this );
m_eventCb = std::make_unique<EventCb>( this );
m_producerCb = std::make_unique<DeliveryReportCb>( this );

std::string errstr;
@@ -90,19 +95,19 @@ KafkaAdapterManager::KafkaAdapterManager( csp::Engine * engine, const Dictionary
setConfProperties( m_consumerConf.get(), *properties.get<DictionaryPtr>( "rd_kafka_consumer_conf_properties" ) );
if( properties.exists( "start_offset" ) )
{
//used later in start since we need starttime
// used later in start since we need starttime
m_startOffsetProperty = properties.getUntypedValue( "start_offset" );
}

if( m_consumerConf -> set( "event_cb", m_eventCb.get(), errstr ) != RdKafka::Conf::CONF_OK )
if( m_consumerConf->set( "event_cb", m_eventCb.get(), errstr ) != RdKafka::Conf::CONF_OK )
CSP_THROW( RuntimeException, "Failed to set consumer error cb: " << errstr );

m_producerConf.reset( RdKafka::Conf::create( RdKafka::Conf::CONF_GLOBAL ) );
setConfProperties( m_producerConf.get(), rdKafkaProperties );
setConfProperties( m_producerConf.get(), *properties.get<DictionaryPtr>( "rd_kafka_producer_conf_properties" ) );
if( m_producerConf -> set( "dr_cb", m_producerCb.get(), errstr ) != RdKafka::Conf::CONF_OK )
if( m_producerConf->set( "dr_cb", m_producerCb.get(), errstr ) != RdKafka::Conf::CONF_OK )
CSP_THROW( RuntimeException, "Failed to set producer callback: " << errstr );
if( m_producerConf -> set( "event_cb", m_eventCb.get(), errstr ) != RdKafka::Conf::CONF_OK )
if( m_producerConf->set( "event_cb", m_eventCb.get(), errstr ) != RdKafka::Conf::CONF_OK )
CSP_THROW( RuntimeException, "Failed to set producer error cb: " << errstr );
}

@@ -112,7 +117,7 @@ KafkaAdapterManager::~KafkaAdapterManager()
if( m_producerPollThreadActive )
{
m_producerPollThreadActive = false;
m_producerPollThread -> join();
m_producerPollThread->join();
}
}

@@ -122,9 +127,9 @@ void KafkaAdapterManager::setConfProperties( RdKafka::Conf * conf, const Diction

for( auto it = properties.begin(); it != properties.end(); ++it )
{
std::string key = it.key();
std::string key = it.key();
std::string value = properties.get<std::string>( key );
if( conf -> set( key, value, errstr ) != RdKafka::Conf::CONF_OK )
if( conf->set( key, value, errstr ) != RdKafka::Conf::CONF_OK )
CSP_THROW( RuntimeException, "Failed to set property " << key << ": " << errstr );
}
}
@@ -134,18 +139,18 @@ void KafkaAdapterManager::forceShutdown( const std::string & err )
forceConsumerReplayComplete();
try
{
CSP_THROW( RuntimeException, "Kafka fatal error. " + err );
CSP_THROW( RuntimeException, "Kafka fatal error. " + err );
}
catch( const RuntimeException & )
{
rootEngine() -> shutdown( std::current_exception() );
rootEngine()->shutdown( std::current_exception() );
}
}

void KafkaAdapterManager::forceConsumerReplayComplete()
{
for( auto & consumer : m_consumerVector )
consumer -> forceReplayCompleted();
consumer->forceReplayCompleted();
}

void KafkaAdapterManager::start( DateTime starttime, DateTime endtime )
@@ -155,29 +160,29 @@ void KafkaAdapterManager::start( DateTime starttime, DateTime endtime )
if( !m_staticPublishers.empty() || !m_dynamicPublishers.empty() )
{
m_producer.reset( RdKafka::Producer::create( m_producerConf.get(), errstr ) );
if ( !m_producer )
if( !m_producer )
{
CSP_THROW( RuntimeException, "Failed to create producer: " << errstr );
}
}

// start all consumers
for( auto & it : m_consumerVector )
it -> start( starttime );
it->start( starttime );

// start all publishers
for( auto & it : m_staticPublishers )
it.second -> start( m_producer );
it.second->start( m_producer );

for( auto & it : m_dynamicPublishers )
it -> start( m_producer );
it->start( m_producer );

AdapterManager::start( starttime, endtime );

if( !m_staticPublishers.empty() || !m_dynamicPublishers.empty() )
{
m_producerPollThreadActive = true;
m_producerPollThread = std::make_unique<std::thread>( [ this ](){ pollProducers(); } );
m_producerPollThread = std::make_unique<std::thread>( [this]() { pollProducers(); } );
}
}

@@ -187,20 +192,20 @@ void KafkaAdapterManager::stop()

// stop all consumers
for( auto & it : m_consumerVector )
it -> stop();
it->stop();

if( m_producerPollThreadActive )
{
m_producerPollThreadActive = false;
m_producerPollThread -> join();
m_producerPollThread->join();
}

// stop all publishers
for( auto & it : m_staticPublishers )
it.second -> stop();
it.second->stop();

for( auto & it : m_dynamicPublishers )
it -> stop();
it->stop();

m_staticPublishers.clear();
m_dynamicPublishers.clear();
@@ -218,44 +223,46 @@ void KafkaAdapterManager::pollProducers()
{
while( m_producerPollThreadActive )
{
m_producer -> poll( 1000 );
m_producer->poll( 1000 );
}

try
{
while( true )
{
auto rc = m_producer -> flush( 10000 );
auto rc = m_producer->flush( 10000 );
if( !rc )
break;

if( rc && rc != RdKafka::ERR__TIMED_OUT )
CSP_THROW( RuntimeException, "KafkaProducer failed to flush pending msgs on shutdown: " << RdKafka::err2str( rc ) );
CSP_THROW( RuntimeException,
"KafkaProducer failed to flush pending msgs on shutdown: " << RdKafka::err2str( rc ) );
}
}
catch( ... )
{
rootEngine() -> shutdown( std::current_exception() );
rootEngine()->shutdown( std::current_exception() );
}
}

PushInputAdapter * KafkaAdapterManager::getInputAdapter( CspTypePtr & type, PushMode pushMode, const Dictionary & properties )
PushInputAdapter * KafkaAdapterManager::getInputAdapter( CspTypePtr & type, PushMode pushMode,
const Dictionary & properties )
{
std::string topic = properties.get<std::string>( "topic" );
std::string key = properties.get<std::string>( "key" );
KafkaSubscriber * subscriber = this -> getSubscriber( topic, key, properties );
return subscriber -> getInputAdapter( type, pushMode, properties );
std::string topic = properties.get<std::string>( "topic" );
std::string key = properties.get<std::string>( "key" );
KafkaSubscriber * subscriber = this->getSubscriber( topic, key, properties );
return subscriber->getInputAdapter( type, pushMode, properties );
}

OutputAdapter * KafkaAdapterManager::getOutputAdapter( CspTypePtr & type, const Dictionary & properties )
{
std::string topic = properties.get<std::string>( "topic" );
try
{
auto key = properties.get<std::string>( "key" );
auto pair = TopicKeyPair( topic, key );
KafkaPublisher * publisher = this -> getStaticPublisher( pair, properties );
return publisher -> getOutputAdapter( type, properties, key );
auto key = properties.get<std::string>( "key" );
auto pair = TopicKeyPair( topic, key );
KafkaPublisher * publisher = this->getStaticPublisher( pair, properties );
return publisher->getOutputAdapter( type, properties, key );
}
catch( TypeError & e )
{
@@ -264,8 +271,8 @@ OutputAdapter * KafkaAdapterManager::getOutputAdapter( CspTypePtr & type, const
for( auto & it : key )
keyFields.emplace_back( std::get<std::string>( it._data ) );

KafkaPublisher * publisher = this -> getDynamicPublisher( topic, properties );
return publisher -> getOutputAdapter( type, properties, keyFields );
KafkaPublisher * publisher = this->getDynamicPublisher( topic, properties );
return publisher->getOutputAdapter( type, properties, keyFields );
}
}

@@ -276,37 +283,38 @@ KafkaConsumer * KafkaAdapterManager::getConsumer( const std::string & topic, con
// If we have reached m_maxThreads, then round-robin the topic onto a consumer (and insert it into the map)
if( m_consumerMap.find( topic ) != m_consumerMap.end() )
{
return m_consumerMap[ topic ].get();
return m_consumerMap[topic].get();
}
if( m_consumerVector.size() < m_maxThreads )
{
auto consumer = std::make_shared<KafkaConsumer>( this, properties );
m_consumerVector.emplace_back( consumer );
m_consumerMap.emplace( topic, consumer );
return m_consumerMap[ topic ].get();
return m_consumerMap[topic].get();
}

auto consumer = m_consumerVector[ m_consumerIdx++ ];
auto consumer = m_consumerVector[m_consumerIdx++];
m_consumerMap.emplace( topic, consumer );
if( m_consumerIdx >= m_maxThreads )
m_consumerIdx = 0;
return consumer.get();
}

KafkaSubscriber * KafkaAdapterManager::getSubscriber( const std::string & topic, const std::string & key, const Dictionary & properties )
KafkaSubscriber * KafkaAdapterManager::getSubscriber( const std::string & topic, const std::string & key,
const Dictionary & properties )
{
auto pair = TopicKeyPair( topic, key );
auto rv = m_subscribers.emplace( pair, nullptr );
auto rv = m_subscribers.emplace( pair, nullptr );

if( rv.second )
{
std::unique_ptr<KafkaSubscriber> subscriber( new KafkaSubscriber( this, properties ) );
rv.first -> second = std::move( subscriber );
rv.first->second = std::move( subscriber );

this -> getConsumer( topic, properties ) -> addSubscriber( topic, key, rv.first -> second.get() );
this->getConsumer( topic, properties )->addSubscriber( topic, key, rv.first->second.get() );
}

return rv.first -> second.get();
return rv.first->second.get();
}

// for static (string) keys, we create one publisher instance per <topic, key> pair
@@ -317,10 +325,10 @@ KafkaPublisher * KafkaAdapterManager::getStaticPublisher( const TopicKeyPair & p
if( rv.second )
{
std::unique_ptr<KafkaPublisher> publisher( new KafkaPublisher( this, properties, pair.first ) );
rv.first -> second = std::move( publisher );
rv.first->second = std::move( publisher );
}

KafkaPublisher * p = rv.first -> second.get();
KafkaPublisher * p = rv.first->second.get();
return p;
}

@@ -332,4 +340,4 @@ KafkaPublisher * KafkaAdapterManager::getDynamicPublisher( const std::string & t
return p;
}

}
} // namespace csp::adapters::kafka
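
For readers skimming the reformatted hunks above, the pattern being reindented is: custom RdKafka callback classes (DeliveryReportCb, EventCb) are registered on RdKafka::Conf objects, and librdkafka reports delivery results and error events back through them. The following is a stripped-down, self-contained sketch of that registration pattern which logs to stderr instead of calling pushStatus()/forceShutdown(); it is an illustration under those simplifying assumptions, not code from the repository.

#include <iostream>
#include <memory>
#include <string>

#include <librdkafka/rdkafkacpp.h>

// Minimal event callback: surface librdkafka error events, similar in spirit to the
// EventCb above, but simply log them rather than pushing adapter status.
class LoggingEventCb : public RdKafka::EventCb
{
public:
    void event_cb( RdKafka::Event & event ) override
    {
        if( event.type() == RdKafka::Event::EVENT_ERROR )
            std::cerr << "kafka error: " << RdKafka::err2str( (RdKafka::ErrorCode)event.err() )
                      << ". Reason: " << event.str() << std::endl;
    }
};

int main()
{
    std::string errstr;
    std::unique_ptr<RdKafka::Conf> conf( RdKafka::Conf::create( RdKafka::Conf::CONF_GLOBAL ) );

    LoggingEventCb cb;
    if( conf->set( "event_cb", &cb, errstr ) != RdKafka::Conf::CONF_OK )
    {
        std::cerr << "failed to set event_cb: " << errstr << std::endl;
        return 1;
    }

    // A real adapter would now create a consumer or producer from this Conf,
    // as KafkaAdapterManager::start() does with RdKafka::Producer::create().
    return 0;
}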