// components/driver/uart.c — ESP-IDF UART driver implementation
   1  // Copyright 2015-2019 Espressif Systems (Shanghai) PTE LTD
   2  //
   3  // Licensed under the Apache License, Version 2.0 (the "License");
   4  // you may not use this file except in compliance with the License.
   5  // You may obtain a copy of the License at
   6  
   7  //     http://www.apache.org/licenses/LICENSE-2.0
   8  //
   9  // Unless required by applicable law or agreed to in writing, software
  10  // distributed under the License is distributed on an "AS IS" BASIS,
  11  // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  12  // See the License for the specific language governing permissions and
  13  // limitations under the License.
  14  #include <string.h>
  15  #include "esp_types.h"
  16  #include "esp_attr.h"
  17  #include "esp_intr_alloc.h"
  18  #include "esp_log.h"
  19  #include "esp_err.h"
  20  #include "malloc.h"
  21  #include "freertos/FreeRTOS.h"
  22  #include "freertos/semphr.h"
  23  #include "freertos/xtensa_api.h"
  24  #include "freertos/ringbuf.h"
  25  #include "hal/uart_hal.h"
  26  #include "soc/uart_periph.h"
  27  #include "driver/uart.h"
  28  #include "driver/gpio.h"
  29  #include "driver/uart_select.h"
  30  #include "driver/periph_ctrl.h"
  31  #include "sdkconfig.h"
  32  #include "esp_rom_gpio.h"
  33  
  34  #if CONFIG_IDF_TARGET_ESP32
  35  #include "esp32/clk.h"
  36  #elif CONFIG_IDF_TARGET_ESP32S2
  37  #include "esp32s2/clk.h"
  38  #elif CONFIG_IDF_TARGET_ESP32S3
  39  #include "esp32s3/clk.h"
  40  #endif
  41  
  42  #ifdef CONFIG_UART_ISR_IN_IRAM
  43  #define UART_ISR_ATTR IRAM_ATTR
  44  #else
  45  #define UART_ISR_ATTR
  46  #endif
  47  
  48  #define XOFF (0x13)
  49  #define XON (0x11)
  50  
  51  static const char* UART_TAG = "uart";
  52  #define UART_CHECK(a, str, ret_val) \
  53      if (!(a)) { \
  54          ESP_LOGE(UART_TAG,"%s(%d): %s", __FUNCTION__, __LINE__, str); \
  55          return (ret_val); \
  56      }
  57  
  58  #define UART_EMPTY_THRESH_DEFAULT       (10)
  59  #define UART_FULL_THRESH_DEFAULT        (120)
  60  #define UART_TOUT_THRESH_DEFAULT        (10)
  61  #define UART_CLKDIV_FRAG_BIT_WIDTH      (3)
  62  #define UART_TX_IDLE_NUM_DEFAULT        (0)
  63  #define UART_PATTERN_DET_QLEN_DEFAULT   (10)
  64  #define UART_MIN_WAKEUP_THRESH          (UART_LL_MIN_WAKEUP_THRESH)
  65  
  66  #define UART_INTR_CONFIG_FLAG ((UART_INTR_RXFIFO_FULL) \
  67                              | (UART_INTR_RXFIFO_TOUT) \
  68                              | (UART_INTR_RXFIFO_OVF) \
  69                              | (UART_INTR_BRK_DET) \
  70                              | (UART_INTR_PARITY_ERR))
  71  
  72  #define UART_ENTER_CRITICAL_ISR(mux)    portENTER_CRITICAL_ISR(mux)
  73  #define UART_EXIT_CRITICAL_ISR(mux)     portEXIT_CRITICAL_ISR(mux)
  74  #define UART_ENTER_CRITICAL(mux)    portENTER_CRITICAL(mux)
  75  #define UART_EXIT_CRITICAL(mux)     portEXIT_CRITICAL(mux)
  76  
  77  
  78  // Check actual UART mode set
  79  #define UART_IS_MODE_SET(uart_number, mode) ((p_uart_obj[uart_number]->uart_mode == mode))
  80  
  81  #define UART_CONTEX_INIT_DEF(uart_num) {\
  82      .hal.dev = UART_LL_GET_HW(uart_num),\
  83      .spinlock = portMUX_INITIALIZER_UNLOCKED,\
  84      .hw_enabled = false,\
  85  }
  86  
  87  typedef struct {
  88      uart_event_type_t type;        /*!< UART TX data type */
  89      struct {
  90          int brk_len;
  91          size_t size;
  92          uint8_t data[0];
  93      } tx_data;
  94  } uart_tx_data_t;
  95  
  96  typedef struct {
  97      int wr;
  98      int rd;
  99      int len;
 100      int* data;
 101  } uart_pat_rb_t;
 102  
 103  typedef struct {
 104      uart_port_t uart_num;               /*!< UART port number*/
 105      int queue_size;                     /*!< UART event queue size*/
 106      QueueHandle_t xQueueUart;           /*!< UART queue handler*/
 107      intr_handle_t intr_handle;          /*!< UART interrupt handle*/
 108      uart_mode_t uart_mode;              /*!< UART controller actual mode set by uart_set_mode() */
 109      bool coll_det_flg;                  /*!< UART collision detection flag */
 110      bool rx_always_timeout_flg;         /*!< UART always detect rx timeout flag */
 111  
 112      //rx parameters
 113      int rx_buffered_len;                  /*!< UART cached data length */
 114      SemaphoreHandle_t rx_mux;           /*!< UART RX data mutex*/
 115      int rx_buf_size;                    /*!< RX ring buffer size */
 116      RingbufHandle_t rx_ring_buf;        /*!< RX ring buffer handler*/
 117      bool rx_buffer_full_flg;            /*!< RX ring buffer full flag. */
 118      int rx_cur_remain;                  /*!< Data number that waiting to be read out in ring buffer item*/
 119      uint8_t* rx_ptr;                    /*!< pointer to the current data in ring buffer*/
 120      uint8_t* rx_head_ptr;               /*!< pointer to the head of RX item*/
 121      uint8_t rx_data_buf[SOC_UART_FIFO_LEN]; /*!< Data buffer to stash FIFO data*/
 122      uint8_t rx_stash_len;               /*!< stashed data length.(When using flow control, after reading out FIFO data, if we fail to push to buffer, we can just stash them.) */
 123      uart_pat_rb_t rx_pattern_pos;
 124  
 125      //tx parameters
 126      SemaphoreHandle_t tx_fifo_sem;      /*!< UART TX FIFO semaphore*/
 127      SemaphoreHandle_t tx_mux;           /*!< UART TX mutex*/
 128      SemaphoreHandle_t tx_done_sem;      /*!< UART TX done semaphore*/
 129      SemaphoreHandle_t tx_brk_sem;       /*!< UART TX send break done semaphore*/
 130      int tx_buf_size;                    /*!< TX ring buffer size */
 131      RingbufHandle_t tx_ring_buf;        /*!< TX ring buffer handler*/
 132      bool tx_waiting_fifo;               /*!< this flag indicates that some task is waiting for FIFO empty interrupt, used to send all data without any data buffer*/
 133      uint8_t* tx_ptr;                    /*!< TX data pointer to push to FIFO in TX buffer mode*/
 134      uart_tx_data_t* tx_head;            /*!< TX data pointer to head of the current buffer in TX ring buffer*/
 135      uint32_t tx_len_tot;                /*!< Total length of current item in ring buffer*/
 136      uint32_t tx_len_cur;
 137      uint8_t tx_brk_flg;                 /*!< Flag to indicate to send a break signal in the end of the item sending procedure */
 138      uint8_t tx_brk_len;                 /*!< TX break signal cycle length/number */
 139      uint8_t tx_waiting_brk;             /*!< Flag to indicate that TX FIFO is ready to send break signal after FIFO is empty, do not push data into TX FIFO right now.*/
 140      uart_select_notif_callback_t uart_select_notif_callback; /*!< Notification about select() events */
 141  } uart_obj_t;
 142  
 143  typedef struct {
 144      uart_hal_context_t hal;        /*!< UART hal context*/
 145      portMUX_TYPE spinlock;
 146      bool hw_enabled;
 147  } uart_context_t;
 148  
 149  static uart_obj_t *p_uart_obj[UART_NUM_MAX] = {0};
 150  
 151  static uart_context_t uart_context[UART_NUM_MAX] = {
 152      UART_CONTEX_INIT_DEF(UART_NUM_0),
 153      UART_CONTEX_INIT_DEF(UART_NUM_1),
 154  #if UART_NUM_MAX > 2
 155      UART_CONTEX_INIT_DEF(UART_NUM_2),
 156  #endif
 157  };
 158  
 159  static portMUX_TYPE uart_selectlock = portMUX_INITIALIZER_UNLOCKED;
 160  
 161  static void uart_module_enable(uart_port_t uart_num)
 162  {
 163      UART_ENTER_CRITICAL(&(uart_context[uart_num].spinlock));
 164      if (uart_context[uart_num].hw_enabled != true) {
 165          if (uart_num != CONFIG_ESP_CONSOLE_UART_NUM) {
 166              periph_module_reset(uart_periph_signal[uart_num].module);
 167          }
 168          periph_module_enable(uart_periph_signal[uart_num].module);
 169          uart_context[uart_num].hw_enabled = true;
 170      }
 171      UART_EXIT_CRITICAL(&(uart_context[uart_num].spinlock));
 172  }
 173  
 174  static void uart_module_disable(uart_port_t uart_num)
 175  {
 176      UART_ENTER_CRITICAL(&(uart_context[uart_num].spinlock));
 177      if (uart_context[uart_num].hw_enabled != false) {
 178          if (uart_num != CONFIG_ESP_CONSOLE_UART_NUM ) {
 179              periph_module_disable(uart_periph_signal[uart_num].module);
 180          }
 181          uart_context[uart_num].hw_enabled = false;
 182      }
 183      UART_EXIT_CRITICAL(&(uart_context[uart_num].spinlock));
 184  }
 185  
 186  esp_err_t uart_set_word_length(uart_port_t uart_num, uart_word_length_t data_bit)
 187  {
 188      UART_CHECK((uart_num < UART_NUM_MAX), "uart_num error", ESP_FAIL);
 189      UART_CHECK((data_bit < UART_DATA_BITS_MAX), "data bit error", ESP_FAIL);
 190      UART_ENTER_CRITICAL(&(uart_context[uart_num].spinlock));
 191      uart_hal_set_data_bit_num(&(uart_context[uart_num].hal), data_bit);
 192      UART_EXIT_CRITICAL(&(uart_context[uart_num].spinlock));
 193      return ESP_OK;
 194  }
 195  
 196  esp_err_t uart_get_word_length(uart_port_t uart_num, uart_word_length_t* data_bit)
 197  {
 198      UART_CHECK((uart_num < UART_NUM_MAX), "uart_num error", ESP_FAIL);
 199      uart_hal_get_data_bit_num(&(uart_context[uart_num].hal), data_bit);
 200      return ESP_OK;
 201  }
 202  
 203  esp_err_t uart_set_stop_bits(uart_port_t uart_num, uart_stop_bits_t stop_bit)
 204  {
 205      UART_CHECK((uart_num < UART_NUM_MAX), "uart_num error", ESP_FAIL);
 206      UART_CHECK((stop_bit < UART_STOP_BITS_MAX), "stop bit error", ESP_FAIL);
 207      UART_ENTER_CRITICAL(&(uart_context[uart_num].spinlock));
 208      uart_hal_set_stop_bits(&(uart_context[uart_num].hal), stop_bit);
 209      UART_EXIT_CRITICAL(&(uart_context[uart_num].spinlock));
 210      return ESP_OK;
 211  }
 212  
 213  esp_err_t uart_get_stop_bits(uart_port_t uart_num, uart_stop_bits_t* stop_bit)
 214  {
 215      UART_CHECK((uart_num < UART_NUM_MAX), "uart_num error", ESP_FAIL);
 216      uart_hal_get_stop_bits(&(uart_context[uart_num].hal), stop_bit);
 217      return ESP_OK;
 218  }
 219  
 220  esp_err_t uart_set_parity(uart_port_t uart_num, uart_parity_t parity_mode)
 221  {
 222      UART_CHECK((uart_num < UART_NUM_MAX), "uart_num error", ESP_FAIL);
 223      UART_ENTER_CRITICAL(&(uart_context[uart_num].spinlock));
 224      uart_hal_set_parity(&(uart_context[uart_num].hal), parity_mode);
 225      UART_EXIT_CRITICAL(&(uart_context[uart_num].spinlock));
 226      return ESP_OK;
 227  }
 228  
 229  esp_err_t uart_get_parity(uart_port_t uart_num, uart_parity_t* parity_mode)
 230  {
 231      UART_CHECK((uart_num < UART_NUM_MAX), "uart_num error", ESP_FAIL);
 232      uart_hal_get_parity(&(uart_context[uart_num].hal), parity_mode);
 233      return ESP_OK;
 234  }
 235  
 236  esp_err_t uart_set_baudrate(uart_port_t uart_num, uint32_t baud_rate)
 237  {
 238      UART_CHECK((uart_num < UART_NUM_MAX), "uart_num error", ESP_FAIL);
 239      uart_sclk_t source_clk = 0;
 240      UART_ENTER_CRITICAL(&(uart_context[uart_num].spinlock));
 241      uart_hal_get_sclk(&(uart_context[uart_num].hal), &source_clk);
 242      uart_hal_set_baudrate(&(uart_context[uart_num].hal), source_clk, baud_rate);
 243      UART_EXIT_CRITICAL(&(uart_context[uart_num].spinlock));
 244      return ESP_OK;
 245  }
 246  
 247  esp_err_t uart_get_baudrate(uart_port_t uart_num, uint32_t *baudrate)
 248  {
 249      UART_CHECK((uart_num < UART_NUM_MAX), "uart_num error", ESP_FAIL);
 250      UART_ENTER_CRITICAL(&(uart_context[uart_num].spinlock));
 251      uart_hal_get_baudrate(&(uart_context[uart_num].hal), baudrate);
 252      UART_EXIT_CRITICAL(&(uart_context[uart_num].spinlock));
 253      return ESP_OK;
 254  }
 255  
 256  esp_err_t uart_set_line_inverse(uart_port_t uart_num, uint32_t inverse_mask)
 257  {
 258      UART_CHECK((uart_num < UART_NUM_MAX), "uart_num error", ESP_FAIL);
 259      UART_ENTER_CRITICAL(&(uart_context[uart_num].spinlock));
 260      uart_hal_inverse_signal(&(uart_context[uart_num].hal), inverse_mask);
 261      UART_EXIT_CRITICAL(&(uart_context[uart_num].spinlock));
 262      return ESP_OK;
 263  }
 264  
 265  esp_err_t uart_set_sw_flow_ctrl(uart_port_t uart_num, bool enable,  uint8_t rx_thresh_xon,  uint8_t rx_thresh_xoff)
 266  {
 267      UART_CHECK((uart_num < UART_NUM_MAX), "uart_num error", ESP_FAIL);
 268      UART_CHECK((rx_thresh_xon < SOC_UART_FIFO_LEN), "rx flow xon thresh error", ESP_FAIL);
 269      UART_CHECK((rx_thresh_xoff < SOC_UART_FIFO_LEN), "rx flow xon thresh error", ESP_FAIL);
 270      uart_sw_flowctrl_t sw_flow_ctl = {
 271          .xon_char = XON,
 272          .xoff_char = XOFF,
 273          .xon_thrd = rx_thresh_xon,
 274          .xoff_thrd = rx_thresh_xoff,
 275      };
 276      UART_ENTER_CRITICAL(&(uart_context[uart_num].spinlock));
 277      uart_hal_set_sw_flow_ctrl(&(uart_context[uart_num].hal), &sw_flow_ctl, enable);
 278      UART_EXIT_CRITICAL(&(uart_context[uart_num].spinlock));
 279      return ESP_OK;
 280  }
 281  
 282  esp_err_t uart_set_hw_flow_ctrl(uart_port_t uart_num, uart_hw_flowcontrol_t flow_ctrl, uint8_t rx_thresh)
 283  {
 284      UART_CHECK((uart_num < UART_NUM_MAX), "uart_num error", ESP_FAIL);
 285      UART_CHECK((rx_thresh < SOC_UART_FIFO_LEN), "rx flow thresh error", ESP_FAIL);
 286      UART_CHECK((flow_ctrl < UART_HW_FLOWCTRL_MAX), "hw_flowctrl mode error", ESP_FAIL);
 287      UART_ENTER_CRITICAL(&(uart_context[uart_num].spinlock));
 288      uart_hal_set_hw_flow_ctrl(&(uart_context[uart_num].hal), flow_ctrl, rx_thresh);
 289      UART_EXIT_CRITICAL(&(uart_context[uart_num].spinlock));
 290      return ESP_OK;
 291  }
 292  
 293  esp_err_t uart_get_hw_flow_ctrl(uart_port_t uart_num, uart_hw_flowcontrol_t* flow_ctrl)
 294  {
 295      UART_CHECK((uart_num < UART_NUM_MAX), "uart_num error", ESP_FAIL)
 296      UART_ENTER_CRITICAL(&(uart_context[uart_num].spinlock));
 297      uart_hal_get_hw_flow_ctrl(&(uart_context[uart_num].hal), flow_ctrl);
 298      UART_EXIT_CRITICAL(&(uart_context[uart_num].spinlock));
 299      return ESP_OK;
 300  }
 301  
 302  esp_err_t UART_ISR_ATTR uart_clear_intr_status(uart_port_t uart_num, uint32_t clr_mask)
 303  {
 304      UART_CHECK((uart_num < UART_NUM_MAX), "uart_num error", ESP_FAIL);
 305      uart_hal_clr_intsts_mask(&(uart_context[uart_num].hal), clr_mask);
 306      return ESP_OK;
 307  }
 308  
 309  esp_err_t uart_enable_intr_mask(uart_port_t uart_num, uint32_t enable_mask)
 310  {
 311      UART_CHECK((uart_num < UART_NUM_MAX), "uart_num error", ESP_FAIL);
 312      UART_ENTER_CRITICAL(&(uart_context[uart_num].spinlock));
 313      uart_hal_clr_intsts_mask(&(uart_context[uart_num].hal), enable_mask);
 314      uart_hal_ena_intr_mask(&(uart_context[uart_num].hal), enable_mask);
 315      UART_EXIT_CRITICAL(&(uart_context[uart_num].spinlock));
 316      return ESP_OK;
 317  }
 318  
 319  esp_err_t uart_disable_intr_mask(uart_port_t uart_num, uint32_t disable_mask)
 320  {
 321      UART_CHECK((uart_num < UART_NUM_MAX), "uart_num error", ESP_FAIL);
 322      UART_ENTER_CRITICAL(&(uart_context[uart_num].spinlock));
 323      uart_hal_disable_intr_mask(&(uart_context[uart_num].hal), disable_mask);
 324      UART_EXIT_CRITICAL(&(uart_context[uart_num].spinlock));
 325      return ESP_OK;
 326  }
 327  
// Release the pattern-position queue's backing array, if one was allocated.
// The pointer is detached and the indices reset inside the critical section
// so a concurrent ISR never observes a freed buffer; free() itself runs
// outside the lock to keep the critical section short.
static esp_err_t uart_pattern_link_free(uart_port_t uart_num)
{
    if (p_uart_obj[uart_num]->rx_pattern_pos.data != NULL) {
        int* pdata = p_uart_obj[uart_num]->rx_pattern_pos.data;
        UART_ENTER_CRITICAL(&(uart_context[uart_num].spinlock));
        p_uart_obj[uart_num]->rx_pattern_pos.data = NULL;
        p_uart_obj[uart_num]->rx_pattern_pos.wr = 0;
        p_uart_obj[uart_num]->rx_pattern_pos.rd = 0;
        UART_EXIT_CRITICAL(&(uart_context[uart_num].spinlock));
        free(pdata);
    }
    return ESP_OK;
}
 341  
 342  static esp_err_t UART_ISR_ATTR uart_pattern_enqueue(uart_port_t uart_num, int pos)
 343  {
 344      esp_err_t ret = ESP_OK;
 345      uart_pat_rb_t* p_pos = &p_uart_obj[uart_num]->rx_pattern_pos;
 346      int next = p_pos->wr + 1;
 347      if (next >= p_pos->len) {
 348          next = 0;
 349      }
 350      if (next == p_pos->rd) {
 351          ESP_EARLY_LOGW(UART_TAG, "Fail to enqueue pattern position, pattern queue is full.");
 352          ret = ESP_FAIL;
 353      } else {
 354          p_pos->data[p_pos->wr] = pos;
 355          p_pos->wr = next;
 356          ret = ESP_OK;
 357      }
 358      return ret;
 359  }
 360  
 361  static esp_err_t uart_pattern_dequeue(uart_port_t uart_num)
 362  {
 363      if(p_uart_obj[uart_num]->rx_pattern_pos.data == NULL) {
 364          return ESP_ERR_INVALID_STATE;
 365      } else {
 366          esp_err_t ret = ESP_OK;
 367          uart_pat_rb_t* p_pos = &p_uart_obj[uart_num]->rx_pattern_pos;
 368          if (p_pos->rd == p_pos->wr) {
 369              ret = ESP_FAIL;
 370          } else {
 371              p_pos->rd++;
 372          }
 373          if (p_pos->rd >= p_pos->len) {
 374              p_pos->rd = 0;
 375          }
 376          return ret;
 377      }
 378  }
 379  
// Rebase every queued pattern position after 'diff_len' bytes have been
// consumed from the RX ring buffer: each stored position shifts down by
// diff_len. Positions that become negative refer to data that was already
// read out, so the queue's read index is advanced past them.
static esp_err_t uart_pattern_queue_update(uart_port_t uart_num, int diff_len)
{
    uart_pat_rb_t* p_pos = &p_uart_obj[uart_num]->rx_pattern_pos;
    int rd = p_pos->rd;
    while(rd != p_pos->wr) {
        p_pos->data[rd] -= diff_len;
        int rd_rec = rd;    // remember the slot just adjusted
        rd ++;
        if (rd >= p_pos->len) {
            rd = 0;         // wrap around the ring
        }
        if (p_pos->data[rd_rec] < 0) {
            // Position now points before the buffer start: discard it by
            // moving the read index past this slot.
            p_pos->rd = rd;
        }
    }
    return ESP_OK;
}
 397  
 398  int uart_pattern_pop_pos(uart_port_t uart_num)
 399  {
 400      UART_CHECK((p_uart_obj[uart_num]), "uart driver error", (-1));
 401      UART_ENTER_CRITICAL(&(uart_context[uart_num].spinlock));
 402      uart_pat_rb_t* pat_pos = &p_uart_obj[uart_num]->rx_pattern_pos;
 403      int pos = -1;
 404      if (pat_pos != NULL && pat_pos->rd != pat_pos->wr) {
 405          pos = pat_pos->data[pat_pos->rd];
 406          uart_pattern_dequeue(uart_num);
 407      }
 408      UART_EXIT_CRITICAL(&(uart_context[uart_num].spinlock));
 409      return pos;
 410  }
 411  
 412  int uart_pattern_get_pos(uart_port_t uart_num)
 413  {
 414      UART_CHECK((p_uart_obj[uart_num]), "uart driver error", (-1));
 415      UART_ENTER_CRITICAL(&(uart_context[uart_num].spinlock));
 416      uart_pat_rb_t* pat_pos = &p_uart_obj[uart_num]->rx_pattern_pos;
 417      int pos = -1;
 418      if (pat_pos != NULL && pat_pos->rd != pat_pos->wr) {
 419          pos = pat_pos->data[pat_pos->rd];
 420      }
 421      UART_EXIT_CRITICAL(&(uart_context[uart_num].spinlock));
 422      return pos;
 423  }
 424  
 425  esp_err_t uart_pattern_queue_reset(uart_port_t uart_num, int queue_length)
 426  {
 427      UART_CHECK((uart_num < UART_NUM_MAX), "uart_num error", ESP_FAIL);
 428      UART_CHECK((p_uart_obj[uart_num]), "uart driver error", ESP_ERR_INVALID_STATE);
 429  
 430      int* pdata = (int*) malloc(queue_length * sizeof(int));
 431      if(pdata == NULL) {
 432          return ESP_ERR_NO_MEM;
 433      }
 434      UART_ENTER_CRITICAL(&(uart_context[uart_num].spinlock));
 435      int* ptmp = p_uart_obj[uart_num]->rx_pattern_pos.data;
 436      p_uart_obj[uart_num]->rx_pattern_pos.data = pdata;
 437      p_uart_obj[uart_num]->rx_pattern_pos.len = queue_length;
 438      p_uart_obj[uart_num]->rx_pattern_pos.rd = 0;
 439      p_uart_obj[uart_num]->rx_pattern_pos.wr = 0;
 440      UART_EXIT_CRITICAL(&(uart_context[uart_num].spinlock));
 441      free(ptmp);
 442      return ESP_OK;
 443  }
 444  
 445  #if CONFIG_IDF_TARGET_ESP32
// Enable AT-command pattern detection (ESP32-only variant, guarded by the
// surrounding #if): fires UART_INTR_CMD_CHAR_DET when 'chr_num' consecutive
// 'pattern_chr' bytes arrive with the given gap/idle timing. Timing values
// are passed to hardware unscaled here, unlike the _baud_ variant below.
esp_err_t uart_enable_pattern_det_intr(uart_port_t uart_num, char pattern_chr, uint8_t chr_num, int chr_tout, int post_idle, int pre_idle)
{
    UART_CHECK((uart_num < UART_NUM_MAX), "uart_num error", ESP_FAIL);
    UART_CHECK(chr_tout >= 0 && chr_tout <= UART_RX_GAP_TOUT_V, "uart pattern set error\n", ESP_FAIL);
    UART_CHECK(post_idle >= 0 && post_idle <= UART_POST_IDLE_NUM_V, "uart pattern set error\n", ESP_FAIL);
    UART_CHECK(pre_idle >= 0 && pre_idle <= UART_PRE_IDLE_NUM_V, "uart pattern set error\n", ESP_FAIL);
    uart_at_cmd_t at_cmd = {0};
    at_cmd.cmd_char = pattern_chr;
    at_cmd.char_num = chr_num;
    at_cmd.gap_tout = chr_tout;
    at_cmd.pre_idle = pre_idle;
    at_cmd.post_idle = post_idle;
    // Clear stale detection status before arming the interrupt.
    uart_hal_clr_intsts_mask(&(uart_context[uart_num].hal), UART_INTR_CMD_CHAR_DET);
    UART_ENTER_CRITICAL(&(uart_context[uart_num].spinlock));
    uart_hal_set_at_cmd_char(&(uart_context[uart_num].hal), &at_cmd);
    uart_hal_ena_intr_mask(&(uart_context[uart_num].hal), UART_INTR_CMD_CHAR_DET);
    UART_EXIT_CRITICAL(&(uart_context[uart_num].spinlock));
    return ESP_OK;
}
 465  #endif
 466  
// Enable AT-command pattern detection with baud-rate-relative timing:
// chr_tout / post_idle / pre_idle are expressed in UART bit times. On ESP32
// the hardware counts APB clock cycles, so the values are scaled by
// (APB freq / baud rate); on ESP32-S2 they are passed through unscaled.
esp_err_t uart_enable_pattern_det_baud_intr(uart_port_t uart_num, char pattern_chr, uint8_t chr_num, int chr_tout, int post_idle, int pre_idle)
{
    UART_CHECK((uart_num < UART_NUM_MAX), "uart_num error", ESP_FAIL);
    UART_CHECK(chr_tout >= 0 && chr_tout <= UART_RX_GAP_TOUT_V, "uart pattern set error\n", ESP_FAIL);
    UART_CHECK(post_idle >= 0 && post_idle <= UART_POST_IDLE_NUM_V, "uart pattern set error\n", ESP_FAIL);
    UART_CHECK(pre_idle >= 0 && pre_idle <= UART_PRE_IDLE_NUM_V, "uart pattern set error\n", ESP_FAIL);
    uart_at_cmd_t at_cmd = {0};
    at_cmd.cmd_char = pattern_chr;
    at_cmd.char_num = chr_num;

#if CONFIG_IDF_TARGET_ESP32
    int apb_clk_freq = 0;
    uint32_t uart_baud = 0;
    uint32_t uart_div = 0;
    uart_get_baudrate(uart_num, &uart_baud);
    apb_clk_freq = esp_clk_apb_freq();
    // NOTE(review): divides by the current baud rate — assumes the port has a
    // nonzero baud configured before this is called; confirm against callers.
    uart_div = apb_clk_freq / uart_baud;

    // Convert bit times to APB clock cycles.
    at_cmd.gap_tout = chr_tout * uart_div;
    at_cmd.pre_idle = pre_idle * uart_div;
    at_cmd.post_idle = post_idle * uart_div;
#elif CONFIG_IDF_TARGET_ESP32S2
    at_cmd.gap_tout = chr_tout;
    at_cmd.pre_idle = pre_idle;
    at_cmd.post_idle = post_idle;
#endif
    // Clear stale detection status before arming the interrupt.
    uart_hal_clr_intsts_mask(&(uart_context[uart_num].hal), UART_INTR_CMD_CHAR_DET);
    UART_ENTER_CRITICAL(&(uart_context[uart_num].spinlock));
    uart_hal_set_at_cmd_char(&(uart_context[uart_num].hal), &at_cmd);
    uart_hal_ena_intr_mask(&(uart_context[uart_num].hal), UART_INTR_CMD_CHAR_DET);
    UART_EXIT_CRITICAL(&(uart_context[uart_num].spinlock));
    return ESP_OK;
}
 500  
 501  
 502  esp_err_t uart_disable_pattern_det_intr(uart_port_t uart_num)
 503  {
 504      return uart_disable_intr_mask(uart_num, UART_INTR_CMD_CHAR_DET);
 505  }
 506  
 507  esp_err_t uart_enable_rx_intr(uart_port_t uart_num)
 508  {
 509      return uart_enable_intr_mask(uart_num, UART_INTR_RXFIFO_FULL|UART_INTR_RXFIFO_TOUT);
 510  }
 511  
 512  esp_err_t uart_disable_rx_intr(uart_port_t uart_num)
 513  {
 514      return uart_disable_intr_mask(uart_num, UART_INTR_RXFIFO_FULL|UART_INTR_RXFIFO_TOUT);
 515  }
 516  
 517  esp_err_t uart_disable_tx_intr(uart_port_t uart_num)
 518  {
 519      return uart_disable_intr_mask(uart_num, UART_INTR_TXFIFO_EMPTY);
 520  }
 521  
 522  esp_err_t uart_enable_tx_intr(uart_port_t uart_num, int enable, int thresh)
 523  {
 524      UART_CHECK((uart_num < UART_NUM_MAX), "uart_num error", ESP_FAIL);
 525      UART_CHECK((thresh < SOC_UART_FIFO_LEN), "empty intr threshold error", ESP_FAIL);
 526      uart_hal_clr_intsts_mask(&(uart_context[uart_num].hal), UART_INTR_TXFIFO_EMPTY);
 527      UART_ENTER_CRITICAL(&(uart_context[uart_num].spinlock));
 528      uart_hal_set_txfifo_empty_thr(&(uart_context[uart_num].hal), thresh);
 529      uart_hal_ena_intr_mask(&(uart_context[uart_num].hal), UART_INTR_TXFIFO_EMPTY);
 530      UART_EXIT_CRITICAL(&(uart_context[uart_num].spinlock));
 531      return ESP_OK;
 532  }
 533  
 534  esp_err_t uart_isr_register(uart_port_t uart_num, void (*fn)(void*), void * arg, int intr_alloc_flags,  uart_isr_handle_t *handle)
 535  {
 536      int ret;
 537      UART_CHECK((uart_num < UART_NUM_MAX), "uart_num error", ESP_FAIL);
 538      UART_ENTER_CRITICAL(&(uart_context[uart_num].spinlock));
 539      ret=esp_intr_alloc(uart_periph_signal[uart_num].irq, intr_alloc_flags, fn, arg, handle);
 540      UART_EXIT_CRITICAL(&(uart_context[uart_num].spinlock));
 541      return ret;
 542  }
 543  
 544  esp_err_t uart_isr_free(uart_port_t uart_num)
 545  {
 546      esp_err_t ret;
 547      UART_CHECK((uart_num < UART_NUM_MAX), "uart_num error", ESP_FAIL);
 548      UART_CHECK((p_uart_obj[uart_num]), "uart driver error", ESP_FAIL);
 549      UART_CHECK((p_uart_obj[uart_num]->intr_handle != NULL), "uart driver error", ESP_ERR_INVALID_ARG);
 550      UART_ENTER_CRITICAL(&(uart_context[uart_num].spinlock));
 551      ret=esp_intr_free(p_uart_obj[uart_num]->intr_handle);
 552      p_uart_obj[uart_num]->intr_handle=NULL;
 553      UART_EXIT_CRITICAL(&(uart_context[uart_num].spinlock));
 554      return ret;
 555  }
 556  
 557  //internal signal can be output to multiple GPIO pads
 558  //only one GPIO pad can connect with input signal
//internal signal can be output to multiple GPIO pads
//only one GPIO pad can connect with input signal
// Route the UART signals through the GPIO matrix. Pass a negative pin number
// to leave that signal unassigned. Output pins (TX, RTS) are connected via
// esp_rom_gpio_connect_out_signal; input pins (RX, CTS) get a pull-up (so a
// floating line idles high) and are connected via esp_rom_gpio_connect_in_signal.
esp_err_t uart_set_pin(uart_port_t uart_num, int tx_io_num, int rx_io_num, int rts_io_num, int cts_io_num)
{
    UART_CHECK((uart_num < UART_NUM_MAX), "uart_num error", ESP_FAIL);
    UART_CHECK((tx_io_num < 0 || (GPIO_IS_VALID_OUTPUT_GPIO(tx_io_num))), "tx_io_num error", ESP_FAIL);
    UART_CHECK((rx_io_num < 0 || (GPIO_IS_VALID_GPIO(rx_io_num))), "rx_io_num error", ESP_FAIL);
    UART_CHECK((rts_io_num < 0 || (GPIO_IS_VALID_OUTPUT_GPIO(rts_io_num))), "rts_io_num error", ESP_FAIL);
    UART_CHECK((cts_io_num < 0 || (GPIO_IS_VALID_GPIO(cts_io_num))), "cts_io_num error", ESP_FAIL);

    if(tx_io_num >= 0) {
        PIN_FUNC_SELECT(GPIO_PIN_MUX_REG[tx_io_num], PIN_FUNC_GPIO);
        // Drive TX high (idle level) before attaching the signal to avoid a glitch.
        gpio_set_level(tx_io_num, 1);
        esp_rom_gpio_connect_out_signal(tx_io_num, uart_periph_signal[uart_num].tx_sig, 0, 0);
    }
    if(rx_io_num >= 0) {
        PIN_FUNC_SELECT(GPIO_PIN_MUX_REG[rx_io_num], PIN_FUNC_GPIO);
        gpio_set_pull_mode(rx_io_num, GPIO_PULLUP_ONLY);
        gpio_set_direction(rx_io_num, GPIO_MODE_INPUT);
        esp_rom_gpio_connect_in_signal(rx_io_num, uart_periph_signal[uart_num].rx_sig, 0);
    }
    if(rts_io_num >= 0) {
        PIN_FUNC_SELECT(GPIO_PIN_MUX_REG[rts_io_num], PIN_FUNC_GPIO);
        gpio_set_direction(rts_io_num, GPIO_MODE_OUTPUT);
        esp_rom_gpio_connect_out_signal(rts_io_num, uart_periph_signal[uart_num].rts_sig, 0, 0);
    }
    if(cts_io_num >= 0) {
        PIN_FUNC_SELECT(GPIO_PIN_MUX_REG[cts_io_num], PIN_FUNC_GPIO);
        gpio_set_pull_mode(cts_io_num, GPIO_PULLUP_ONLY);
        gpio_set_direction(cts_io_num, GPIO_MODE_INPUT);
        esp_rom_gpio_connect_in_signal(cts_io_num, uart_periph_signal[uart_num].cts_sig, 0);
    }
    return ESP_OK;
}
 591  
 592  esp_err_t uart_set_rts(uart_port_t uart_num, int level)
 593  {
 594      UART_CHECK((uart_num < UART_NUM_MAX), "uart_num error", ESP_FAIL);
 595      UART_CHECK((!uart_hal_is_hw_rts_en(&(uart_context[uart_num].hal))), "disable hw flowctrl before using sw control", ESP_FAIL);
 596      UART_ENTER_CRITICAL(&(uart_context[uart_num].spinlock));
 597      uart_hal_set_rts(&(uart_context[uart_num].hal), level);
 598      UART_EXIT_CRITICAL(&(uart_context[uart_num].spinlock));
 599      return ESP_OK;
 600  }
 601  
 602  esp_err_t uart_set_dtr(uart_port_t uart_num, int level)
 603  {
 604      UART_CHECK((uart_num < UART_NUM_MAX), "uart_num error", ESP_FAIL);
 605      UART_ENTER_CRITICAL(&(uart_context[uart_num].spinlock));
 606      uart_hal_set_dtr(&(uart_context[uart_num].hal), level);
 607      UART_EXIT_CRITICAL(&(uart_context[uart_num].spinlock));
 608      return ESP_OK;
 609  }
 610  
 611  esp_err_t uart_set_tx_idle_num(uart_port_t uart_num, uint16_t idle_num)
 612  {
 613      UART_CHECK((uart_num < UART_NUM_MAX), "uart_num error", ESP_FAIL);
 614      UART_CHECK((idle_num <= UART_TX_IDLE_NUM_V), "uart idle num error", ESP_FAIL);
 615      UART_ENTER_CRITICAL(&(uart_context[uart_num].spinlock));
 616      uart_hal_set_tx_idle_num(&(uart_context[uart_num].hal), idle_num);
 617      UART_EXIT_CRITICAL(&(uart_context[uart_num].spinlock));
 618      return ESP_OK;
 619  }
 620  
// Apply a full line configuration: enable the module, initialize the HAL,
// then program baud rate, parity, data bits, stop bits, TX idle interval and
// hardware flow control in one critical section. Both FIFOs are reset
// afterwards so stale data does not survive a reconfiguration.
esp_err_t uart_param_config(uart_port_t uart_num, const uart_config_t *uart_config)
{
    UART_CHECK((uart_num < UART_NUM_MAX), "uart_num error", ESP_FAIL);
    UART_CHECK((uart_config), "param null", ESP_FAIL);
    UART_CHECK((uart_config->rx_flow_ctrl_thresh < SOC_UART_FIFO_LEN), "rx flow thresh error", ESP_FAIL);
    UART_CHECK((uart_config->flow_ctrl < UART_HW_FLOWCTRL_MAX), "hw_flowctrl mode error", ESP_FAIL);
    UART_CHECK((uart_config->data_bits < UART_DATA_BITS_MAX), "data bit error", ESP_FAIL);
    // Clock must be running before any register access below.
    uart_module_enable(uart_num);
    UART_ENTER_CRITICAL(&(uart_context[uart_num].spinlock));
    uart_hal_init(&(uart_context[uart_num].hal), uart_num);
    uart_hal_set_baudrate(&(uart_context[uart_num].hal), uart_config->source_clk, uart_config->baud_rate);
    uart_hal_set_parity(&(uart_context[uart_num].hal), uart_config->parity);
    uart_hal_set_data_bit_num(&(uart_context[uart_num].hal), uart_config->data_bits);
    uart_hal_set_stop_bits(&(uart_context[uart_num].hal), uart_config->stop_bits);
    uart_hal_set_tx_idle_num(&(uart_context[uart_num].hal), UART_TX_IDLE_NUM_DEFAULT);
    uart_hal_set_hw_flow_ctrl(&(uart_context[uart_num].hal), uart_config->flow_ctrl, uart_config->rx_flow_ctrl_thresh);
    UART_EXIT_CRITICAL(&(uart_context[uart_num].spinlock));
    uart_hal_rxfifo_rst(&(uart_context[uart_num].hal));
    uart_hal_txfifo_rst(&(uart_context[uart_num].hal));
    return ESP_OK;
}
 642  
// Program interrupt thresholds and enable the interrupts in intr_enable_mask.
// All pending interrupt status is cleared first so no stale event fires as
// soon as the mask is enabled.
esp_err_t uart_intr_config(uart_port_t uart_num, const uart_intr_config_t *intr_conf)
{
    UART_CHECK((uart_num < UART_NUM_MAX), "uart_num error", ESP_FAIL);
    UART_CHECK((intr_conf), "param null", ESP_FAIL);
    uart_hal_clr_intsts_mask(&(uart_context[uart_num].hal), UART_LL_INTR_MASK);
    UART_ENTER_CRITICAL(&(uart_context[uart_num].spinlock));
    if(intr_conf->intr_enable_mask & UART_INTR_RXFIFO_TOUT) {
        uart_hal_set_rx_timeout(&(uart_context[uart_num].hal), intr_conf->rx_timeout_thresh);
    } else {
        //Disable rx_tout intr
        uart_hal_set_rx_timeout(&(uart_context[uart_num].hal), 0);
    }
    if(intr_conf->intr_enable_mask & UART_INTR_RXFIFO_FULL) {
        uart_hal_set_rxfifo_full_thr(&(uart_context[uart_num].hal), intr_conf->rxfifo_full_thresh);
    }
    if(intr_conf->intr_enable_mask & UART_INTR_TXFIFO_EMPTY) {
        uart_hal_set_txfifo_empty_thr(&(uart_context[uart_num].hal), intr_conf->txfifo_empty_intr_thresh);
    }
    uart_hal_ena_intr_mask(&(uart_context[uart_num].hal), intr_conf->intr_enable_mask);
    UART_EXIT_CRITICAL(&(uart_context[uart_num].spinlock));
    return ESP_OK;
}
 665  
 666  static int UART_ISR_ATTR uart_find_pattern_from_last(uint8_t* buf, int length, uint8_t pat_chr, uint8_t pat_num)
 667  {
 668      int cnt = 0;
 669      int len = length;
 670      while (len >= 0) {
 671          if (buf[len] == pat_chr) {
 672              cnt++;
 673          } else {
 674              cnt = 0;
 675          }
 676          if (cnt >= pat_num) {
 677              break;
 678          }
 679          len --;
 680      }
 681      return len;
 682  }
 683  
//Internal ISR handler for default driver code.
//Services every enabled UART interrupt source in a loop until the status
//register reads zero: TX FIFO refill from the TX ring buffer, RX FIFO drain
//into the RX ring buffer (with AT/pattern detection), and error/break events.
//Most handled conditions post a uart_event_t to the application event queue.
static void UART_ISR_ATTR uart_rx_intr_handler_default(void *param)
{
    uart_obj_t *p_uart = (uart_obj_t*) param;
    uint8_t uart_num = p_uart->uart_num;
    int rx_fifo_len = 0;
    uint32_t uart_intr_status = 0;
    uart_event_t uart_event;
    portBASE_TYPE HPTaskAwoken = 0;
    //NOTE(review): pat_flg is a function-static, so it is shared by ALL UART
    //ports that use this handler, not kept per port — confirm this is intended.
    static uint8_t pat_flg = 0;
    while(1) {
        // The `continue statement` may cause the interrupt to loop infinitely
        // we exit the interrupt here
        uart_intr_status = uart_hal_get_intsts_mask(&(uart_context[uart_num].hal));
        //Exit from while loop
        if(uart_intr_status == 0){
            break;
        }
        //UART_EVENT_MAX means "no event to post" for this iteration.
        uart_event.type = UART_EVENT_MAX;
        if(uart_intr_status & UART_INTR_TXFIFO_EMPTY) {
            //Disable first so the empty interrupt cannot retrigger while we refill.
            UART_ENTER_CRITICAL_ISR(&(uart_context[uart_num].spinlock));
            uart_hal_disable_intr_mask(&(uart_context[uart_num].hal), UART_INTR_TXFIFO_EMPTY);
            UART_EXIT_CRITICAL_ISR(&(uart_context[uart_num].spinlock));
            uart_hal_clr_intsts_mask(&(uart_context[uart_num].hal), UART_INTR_TXFIFO_EMPTY);
            //A break is still being transmitted; leave the FIFO alone until it completes.
            if(p_uart->tx_waiting_brk) {
                continue;
            }
            //TX semaphore will only be used when tx_buf_size is zero.
            if(p_uart->tx_waiting_fifo == true && p_uart->tx_buf_size == 0) {
                p_uart->tx_waiting_fifo = false;
                xSemaphoreGiveFromISR(p_uart->tx_fifo_sem, &HPTaskAwoken);
            } else {
                //We don't use TX ring buffer, because the size is zero.
                if(p_uart->tx_buf_size == 0) {
                    continue;
                }
                bool en_tx_flg = false;
                int tx_fifo_rem = uart_hal_get_txfifo_len(&(uart_context[uart_num].hal));
                //We need to put a loop here, in case all the buffer items are very short.
                //That would cause a watch_dog reset because empty interrupt happens so often.
                //Although this is a loop in ISR, this loop will execute at most 128 turns.
                while(tx_fifo_rem) {
                    //No transfer in progress: fetch the next item from the TX ring buffer.
                    if(p_uart->tx_len_tot == 0 || p_uart->tx_ptr == NULL || p_uart->tx_len_cur == 0) {
                        size_t size;
                        p_uart->tx_head = (uart_tx_data_t*) xRingbufferReceiveFromISR(p_uart->tx_ring_buf, &size);
                        if(p_uart->tx_head) {
                            //The first item is the data description
                            //Get the first item to get the data information
                            if(p_uart->tx_len_tot == 0) {
                                p_uart->tx_ptr = NULL;
                                p_uart->tx_len_tot = p_uart->tx_head->tx_data.size;
                                if(p_uart->tx_head->type == UART_DATA_BREAK) {
                                    p_uart->tx_brk_flg = 1;
                                    p_uart->tx_brk_len = p_uart->tx_head->tx_data.brk_len;
                                }
                                //We have saved the data description from the 1st item, return buffer.
                                vRingbufferReturnItemFromISR(p_uart->tx_ring_buf, p_uart->tx_head, &HPTaskAwoken);
                            } else if(p_uart->tx_ptr == NULL) {
                                //Update the TX item pointer, we will need this to return item to buffer.
                                p_uart->tx_ptr = (uint8_t*)p_uart->tx_head;
                                en_tx_flg = true;
                                p_uart->tx_len_cur = size;
                            }
                        } else {
                            //Can not get data from ring buffer, return;
                            break;
                        }
                    }
                    if (p_uart->tx_len_tot > 0 && p_uart->tx_ptr && p_uart->tx_len_cur > 0) {
                        //To fill the TX FIFO.
                        uint32_t send_len = 0;
                        // Set RS485 RTS pin before transmission if the half duplex mode is enabled
                        if (UART_IS_MODE_SET(uart_num, UART_MODE_RS485_HALF_DUPLEX)) {
                            UART_ENTER_CRITICAL_ISR(&(uart_context[uart_num].spinlock));
                            uart_hal_set_rts(&(uart_context[uart_num].hal), 0);
                            uart_hal_ena_intr_mask(&(uart_context[uart_num].hal), UART_INTR_TX_DONE);
                            UART_EXIT_CRITICAL_ISR(&(uart_context[uart_num].spinlock));
                        }
                        //Copy as much of the current item as fits in the FIFO.
                        uart_hal_write_txfifo(&(uart_context[uart_num].hal),
                                              (const uint8_t *)p_uart->tx_ptr,
                                              (p_uart->tx_len_cur > tx_fifo_rem) ? tx_fifo_rem : p_uart->tx_len_cur,
                                              &send_len);
                        p_uart->tx_ptr += send_len;
                        p_uart->tx_len_tot -= send_len;
                        p_uart->tx_len_cur -= send_len;
                        tx_fifo_rem -= send_len;
                        if (p_uart->tx_len_cur == 0) {
                            //Return item to ring buffer.
                            vRingbufferReturnItemFromISR(p_uart->tx_ring_buf, p_uart->tx_head, &HPTaskAwoken);
                            p_uart->tx_head = NULL;
                            p_uart->tx_ptr = NULL;
                            //Sending item done, now we need to send break if there is a record.
                            //Set TX break signal after FIFO is empty
                            if(p_uart->tx_len_tot == 0 && p_uart->tx_brk_flg == 1) {
                                uart_hal_clr_intsts_mask(&(uart_context[uart_num].hal), UART_INTR_TX_BRK_DONE);
                                UART_ENTER_CRITICAL_ISR(&(uart_context[uart_num].spinlock));
                                uart_hal_tx_break(&(uart_context[uart_num].hal), p_uart->tx_brk_len);
                                uart_hal_ena_intr_mask(&(uart_context[uart_num].hal), UART_INTR_TX_BRK_DONE);
                                UART_EXIT_CRITICAL_ISR(&(uart_context[uart_num].spinlock));
                                p_uart->tx_waiting_brk = 1;
                                //do not enable TX empty interrupt
                                en_tx_flg = false;
                            } else {
                                //enable TX empty interrupt
                                en_tx_flg = true;
                            }
                        } else {
                            //enable TX empty interrupt
                            en_tx_flg = true;
                        }
                    }
                }
                if (en_tx_flg) {
                    //Re-arm the empty interrupt to continue draining the ring buffer later.
                    uart_hal_clr_intsts_mask(&(uart_context[uart_num].hal), UART_INTR_TXFIFO_EMPTY);
                    UART_ENTER_CRITICAL_ISR(&(uart_context[uart_num].spinlock));
                    uart_hal_ena_intr_mask(&(uart_context[uart_num].hal), UART_INTR_TXFIFO_EMPTY);
                    UART_EXIT_CRITICAL_ISR(&(uart_context[uart_num].spinlock));
                }
            }
        }
        //RX path: FIFO timeout, FIFO full, or AT/pattern character detected.
        else if ((uart_intr_status & UART_INTR_RXFIFO_TOUT)
                || (uart_intr_status & UART_INTR_RXFIFO_FULL)
                || (uart_intr_status & UART_INTR_CMD_CHAR_DET)
                ) {
            //A pattern was seen while the ring buffer was full; replay it now.
            if(pat_flg == 1) {
                uart_intr_status |= UART_INTR_CMD_CHAR_DET;
                pat_flg = 0;
            }
            if (p_uart->rx_buffer_full_flg == false) {
                rx_fifo_len = uart_hal_get_rxfifo_len(&(uart_context[uart_num].hal));
                if ((p_uart_obj[uart_num]->rx_always_timeout_flg) && !(uart_intr_status & UART_INTR_RXFIFO_TOUT)) {
                    rx_fifo_len--; // leave one byte in the fifo in order to trigger uart_intr_rxfifo_tout
                }
                uart_hal_read_rxfifo(&(uart_context[uart_num].hal), p_uart->rx_data_buf, &rx_fifo_len);
                uint8_t pat_chr = 0;
                uint8_t pat_num = 0;
                int pat_idx = -1;
                uart_hal_get_at_cmd_char(&(uart_context[uart_num].hal), &pat_chr, &pat_num);

                //Get the buffer from the FIFO
                if (uart_intr_status & UART_INTR_CMD_CHAR_DET) {
                    uart_hal_clr_intsts_mask(&(uart_context[uart_num].hal), UART_INTR_CMD_CHAR_DET);
                    uart_event.type = UART_PATTERN_DET;
                    uart_event.size = rx_fifo_len;
                    //Locate the pattern inside the bytes just read (length arg is the last index).
                    pat_idx = uart_find_pattern_from_last(p_uart->rx_data_buf, rx_fifo_len - 1, pat_chr, pat_num);
                } else {
                    //After Copying the Data From FIFO ,Clear intr_status
                    uart_hal_clr_intsts_mask(&(uart_context[uart_num].hal), UART_INTR_RXFIFO_TOUT | UART_INTR_RXFIFO_FULL);
                    uart_event.type = UART_DATA;
                    uart_event.size = rx_fifo_len;
                    uart_event.timeout_flag = (uart_intr_status & UART_INTR_RXFIFO_TOUT) ? true : false;
                    //Wake any task blocked in select() on this port.
                    UART_ENTER_CRITICAL_ISR(&uart_selectlock);
                    if (p_uart->uart_select_notif_callback) {
                        p_uart->uart_select_notif_callback(uart_num, UART_SELECT_READ_NOTIF, &HPTaskAwoken);
                    }
                    UART_EXIT_CRITICAL_ISR(&uart_selectlock);
                }
                p_uart->rx_stash_len = rx_fifo_len;
                //If we fail to push data to ring buffer, we will have to stash the data, and send next time.
                //Mainly for applications that uses flow control or small ring buffer.
                if(pdFALSE == xRingbufferSendFromISR(p_uart->rx_ring_buf, p_uart->rx_data_buf, p_uart->rx_stash_len, &HPTaskAwoken)) {
                    p_uart->rx_buffer_full_flg = true;
                    //Stop RX interrupts until the reader task frees ring-buffer space.
                    UART_ENTER_CRITICAL_ISR(&(uart_context[uart_num].spinlock));
                    uart_hal_disable_intr_mask(&(uart_context[uart_num].hal), UART_INTR_RXFIFO_TOUT | UART_INTR_RXFIFO_FULL);
                    UART_EXIT_CRITICAL_ISR(&(uart_context[uart_num].spinlock));
                    if (uart_event.type == UART_PATTERN_DET) {
                        UART_ENTER_CRITICAL_ISR(&(uart_context[uart_num].spinlock));
                        if (rx_fifo_len < pat_num) {
                            //some of the characters are read out in last interrupt
                            uart_pattern_enqueue(uart_num, p_uart->rx_buffered_len - (pat_num - rx_fifo_len));
                        } else {
                            uart_pattern_enqueue(uart_num,
                                    pat_idx <= -1 ?
                                    //can not find the pattern in buffer,
                                    p_uart->rx_buffered_len + p_uart->rx_stash_len :
                                    // find the pattern in buffer
                                    p_uart->rx_buffered_len + pat_idx);
                        }
                        UART_EXIT_CRITICAL_ISR(&(uart_context[uart_num].spinlock));
                        //Post the pattern event immediately; uart_event is reused below for BUFFER_FULL.
                        if ((p_uart->xQueueUart != NULL) && (pdFALSE == xQueueSendFromISR(p_uart->xQueueUart, (void * )&uart_event, &HPTaskAwoken))) {
                            ESP_EARLY_LOGV(UART_TAG, "UART event queue full");
                        }
                    }
                    uart_event.type = UART_BUFFER_FULL;
                } else {
                    //Data stored successfully: record pattern position and account buffered bytes.
                    UART_ENTER_CRITICAL_ISR(&(uart_context[uart_num].spinlock));
                    if (uart_intr_status & UART_INTR_CMD_CHAR_DET) {
                        if (rx_fifo_len < pat_num) {
                            //some of the characters are read out in last interrupt
                            uart_pattern_enqueue(uart_num, p_uart->rx_buffered_len - (pat_num - rx_fifo_len));
                        } else if(pat_idx >= 0) {
                            // find the pattern in stash buffer.
                            uart_pattern_enqueue(uart_num, p_uart->rx_buffered_len + pat_idx);
                        }
                    }
                    p_uart->rx_buffered_len += p_uart->rx_stash_len;
                    UART_EXIT_CRITICAL_ISR(&(uart_context[uart_num].spinlock));
                }
            } else {
                //Ring buffer already full: mute RX interrupts and clear their status.
                UART_ENTER_CRITICAL_ISR(&(uart_context[uart_num].spinlock));
                uart_hal_disable_intr_mask(&(uart_context[uart_num].hal), UART_INTR_RXFIFO_FULL | UART_INTR_RXFIFO_TOUT);
                UART_EXIT_CRITICAL_ISR(&(uart_context[uart_num].spinlock));
                uart_hal_clr_intsts_mask(&(uart_context[uart_num].hal), UART_INTR_RXFIFO_FULL | UART_INTR_RXFIFO_TOUT);
                if(uart_intr_status & UART_INTR_CMD_CHAR_DET) {
                    uart_hal_clr_intsts_mask(&(uart_context[uart_num].hal), UART_INTR_CMD_CHAR_DET);
                    uart_event.type = UART_PATTERN_DET;
                    //NOTE(review): rx_fifo_len was not re-read on this path, so this size
                    //is the value from a previous iteration — confirm this is intended.
                    uart_event.size = rx_fifo_len;
                    //Remember the detection so it is re-processed once space frees up.
                    pat_flg = 1;
                }
            }
        } else if(uart_intr_status & UART_INTR_RXFIFO_OVF) {
            // When fifo overflows, we reset the fifo.
            UART_ENTER_CRITICAL_ISR(&(uart_context[uart_num].spinlock));
            uart_hal_rxfifo_rst(&(uart_context[uart_num].hal));
            UART_EXIT_CRITICAL_ISR(&(uart_context[uart_num].spinlock));
            UART_ENTER_CRITICAL_ISR(&uart_selectlock);
            if (p_uart->uart_select_notif_callback) {
                p_uart->uart_select_notif_callback(uart_num, UART_SELECT_ERROR_NOTIF, &HPTaskAwoken);
            }
            UART_EXIT_CRITICAL_ISR(&uart_selectlock);
            uart_hal_clr_intsts_mask(&(uart_context[uart_num].hal), UART_INTR_RXFIFO_OVF);
            uart_event.type = UART_FIFO_OVF;
        } else if(uart_intr_status & UART_INTR_BRK_DET) {
            uart_hal_clr_intsts_mask(&(uart_context[uart_num].hal), UART_INTR_BRK_DET);
            uart_event.type = UART_BREAK;
        } else if(uart_intr_status & UART_INTR_FRAM_ERR) {
            UART_ENTER_CRITICAL_ISR(&uart_selectlock);
            if (p_uart->uart_select_notif_callback) {
                p_uart->uart_select_notif_callback(uart_num, UART_SELECT_ERROR_NOTIF, &HPTaskAwoken);
            }
            UART_EXIT_CRITICAL_ISR(&uart_selectlock);
            uart_hal_clr_intsts_mask(&(uart_context[uart_num].hal), UART_INTR_FRAM_ERR);
            uart_event.type = UART_FRAME_ERR;
        } else if(uart_intr_status & UART_INTR_PARITY_ERR) {
            UART_ENTER_CRITICAL_ISR(&uart_selectlock);
            if (p_uart->uart_select_notif_callback) {
                p_uart->uart_select_notif_callback(uart_num, UART_SELECT_ERROR_NOTIF, &HPTaskAwoken);
            }
            UART_EXIT_CRITICAL_ISR(&uart_selectlock);
            uart_hal_clr_intsts_mask(&(uart_context[uart_num].hal), UART_INTR_PARITY_ERR);
            uart_event.type = UART_PARITY_ERR;
        } else if(uart_intr_status & UART_INTR_TX_BRK_DONE) {
            //Break finished: stop driving the break condition.
            UART_ENTER_CRITICAL_ISR(&(uart_context[uart_num].spinlock));
            uart_hal_tx_break(&(uart_context[uart_num].hal), 0);
            uart_hal_disable_intr_mask(&(uart_context[uart_num].hal), UART_INTR_TX_BRK_DONE);
            if(p_uart->tx_brk_flg == 1) {
                //Break came from the ring-buffer TX path: resume FIFO refilling.
                uart_hal_ena_intr_mask(&(uart_context[uart_num].hal), UART_INTR_TXFIFO_EMPTY);
            }
            UART_EXIT_CRITICAL_ISR(&(uart_context[uart_num].spinlock));
            uart_hal_clr_intsts_mask(&(uart_context[uart_num].hal), UART_INTR_TX_BRK_DONE);
            if(p_uart->tx_brk_flg == 1) {
                p_uart->tx_brk_flg = 0;
                p_uart->tx_waiting_brk = 0;
            } else {
                //Break came from the blocking TX path: wake the waiting task.
                xSemaphoreGiveFromISR(p_uart->tx_brk_sem, &HPTaskAwoken);
            }
        } else if(uart_intr_status & UART_INTR_TX_BRK_IDLE) {
            UART_ENTER_CRITICAL_ISR(&(uart_context[uart_num].spinlock));
            uart_hal_disable_intr_mask(&(uart_context[uart_num].hal), UART_INTR_TX_BRK_IDLE);
            UART_EXIT_CRITICAL_ISR(&(uart_context[uart_num].spinlock));
            uart_hal_clr_intsts_mask(&(uart_context[uart_num].hal), UART_INTR_TX_BRK_IDLE);
        } else if(uart_intr_status & UART_INTR_CMD_CHAR_DET) {
            uart_hal_clr_intsts_mask(&(uart_context[uart_num].hal), UART_INTR_CMD_CHAR_DET);
            uart_event.type = UART_PATTERN_DET;
        } else if ((uart_intr_status & UART_INTR_RS485_PARITY_ERR)
                || (uart_intr_status & UART_INTR_RS485_FRM_ERR)
                || (uart_intr_status & UART_INTR_RS485_CLASH)) {
            // RS485 collision or frame error interrupt triggered
            UART_ENTER_CRITICAL_ISR(&(uart_context[uart_num].spinlock));
            uart_hal_rxfifo_rst(&(uart_context[uart_num].hal));
            // Set collision detection flag
            p_uart_obj[uart_num]->coll_det_flg = true;
            UART_EXIT_CRITICAL_ISR(&(uart_context[uart_num].spinlock));
            uart_hal_clr_intsts_mask(&(uart_context[uart_num].hal), UART_INTR_RS485_CLASH | UART_INTR_RS485_FRM_ERR | UART_INTR_RS485_PARITY_ERR);
            uart_event.type = UART_EVENT_MAX;
        } else if(uart_intr_status & UART_INTR_TX_DONE) {
            if (UART_IS_MODE_SET(uart_num, UART_MODE_RS485_HALF_DUPLEX) && uart_hal_is_tx_idle(&(uart_context[uart_num].hal)) != true) {
                // The TX_DONE interrupt is triggered but transmit is active
                // then postpone interrupt processing for next interrupt
                uart_event.type = UART_EVENT_MAX;
            } else {
                // Workaround for RS485: If the RS485 half duplex mode is active
                // and transmitter is in idle state then reset received buffer and reset RTS pin
                // skip this behavior for other UART modes
                UART_ENTER_CRITICAL_ISR(&(uart_context[uart_num].spinlock));
                uart_hal_disable_intr_mask(&(uart_context[uart_num].hal), UART_INTR_TX_DONE);
                if (UART_IS_MODE_SET(uart_num, UART_MODE_RS485_HALF_DUPLEX)) {
                    uart_hal_rxfifo_rst(&(uart_context[uart_num].hal));
                    uart_hal_set_rts(&(uart_context[uart_num].hal), 1);
                }
                UART_EXIT_CRITICAL_ISR(&(uart_context[uart_num].spinlock));
                uart_hal_clr_intsts_mask(&(uart_context[uart_num].hal), UART_INTR_TX_DONE);
                xSemaphoreGiveFromISR(p_uart_obj[uart_num]->tx_done_sem, &HPTaskAwoken);
            }
        } else {
            uart_hal_clr_intsts_mask(&(uart_context[uart_num].hal), uart_intr_status); /*simply clear all other intr status*/
            uart_event.type = UART_EVENT_MAX;
        }

        //Deliver the event produced in this iteration, if any.
        if(uart_event.type != UART_EVENT_MAX && p_uart->xQueueUart) {
            if (pdFALSE == xQueueSendFromISR(p_uart->xQueueUart, (void * )&uart_event, &HPTaskAwoken)) {
                ESP_EARLY_LOGV(UART_TAG, "UART event queue full");
            }
        }
    }
    //Request a context switch if any FromISR call woke a higher-priority task.
    if(HPTaskAwoken == pdTRUE) {
        portYIELD_FROM_ISR();
    }
}
 993  
 994  /**************************************************************/
 995  esp_err_t uart_wait_tx_done(uart_port_t uart_num, TickType_t ticks_to_wait)
 996  {
 997      UART_CHECK((uart_num < UART_NUM_MAX), "uart_num error", ESP_FAIL);
 998      UART_CHECK((p_uart_obj[uart_num]), "uart driver error", ESP_FAIL);
 999      BaseType_t res;
1000      portTickType ticks_start = xTaskGetTickCount();
1001      //Take tx_mux
1002      res = xSemaphoreTake(p_uart_obj[uart_num]->tx_mux, (portTickType)ticks_to_wait);
1003      if(res == pdFALSE) {
1004          return ESP_ERR_TIMEOUT;
1005      }
1006      xSemaphoreTake(p_uart_obj[uart_num]->tx_done_sem, 0);
1007      if(uart_hal_is_tx_idle(&(uart_context[uart_num].hal))) {
1008          xSemaphoreGive(p_uart_obj[uart_num]->tx_mux);
1009          return ESP_OK;
1010      }
1011      uart_hal_clr_intsts_mask(&(uart_context[uart_num].hal), UART_INTR_TX_DONE);
1012      UART_ENTER_CRITICAL(&(uart_context[uart_num].spinlock));
1013      uart_hal_ena_intr_mask(&(uart_context[uart_num].hal), UART_INTR_TX_DONE);
1014      UART_EXIT_CRITICAL(&(uart_context[uart_num].spinlock));
1015  
1016      TickType_t ticks_end = xTaskGetTickCount();
1017      if (ticks_end - ticks_start > ticks_to_wait) {
1018          ticks_to_wait = 0;
1019      } else {
1020          ticks_to_wait = ticks_to_wait - (ticks_end - ticks_start);
1021      }
1022      //take 2nd tx_done_sem, wait given from ISR
1023      res = xSemaphoreTake(p_uart_obj[uart_num]->tx_done_sem, (portTickType)ticks_to_wait);
1024      if(res == pdFALSE) {
1025          UART_ENTER_CRITICAL(&(uart_context[uart_num].spinlock));
1026          uart_hal_disable_intr_mask(&(uart_context[uart_num].hal), UART_INTR_TX_DONE);
1027          UART_EXIT_CRITICAL(&(uart_context[uart_num].spinlock));
1028          xSemaphoreGive(p_uart_obj[uart_num]->tx_mux);
1029          return ESP_ERR_TIMEOUT;
1030      }
1031      xSemaphoreGive(p_uart_obj[uart_num]->tx_mux);
1032      return ESP_OK;
1033  }
1034  
1035  int uart_tx_chars(uart_port_t uart_num, const char* buffer, uint32_t len)
1036  {
1037      UART_CHECK((uart_num < UART_NUM_MAX), "uart_num error", (-1));
1038      UART_CHECK((p_uart_obj[uart_num]), "uart driver error", (-1));
1039      UART_CHECK(buffer, "buffer null", (-1));
1040      if(len == 0) {
1041          return 0;
1042      }
1043      int tx_len = 0;
1044      xSemaphoreTake(p_uart_obj[uart_num]->tx_mux, (portTickType)portMAX_DELAY);
1045      if (UART_IS_MODE_SET(uart_num, UART_MODE_RS485_HALF_DUPLEX)) {
1046          UART_ENTER_CRITICAL(&(uart_context[uart_num].spinlock));
1047          uart_hal_set_rts(&(uart_context[uart_num].hal), 0);
1048          uart_hal_ena_intr_mask(&(uart_context[uart_num].hal), UART_INTR_TX_DONE);
1049          UART_EXIT_CRITICAL(&(uart_context[uart_num].spinlock));
1050      }
1051      uart_hal_write_txfifo(&(uart_context[uart_num].hal), (const uint8_t*) buffer, len, (uint32_t *)&tx_len);
1052      xSemaphoreGive(p_uart_obj[uart_num]->tx_mux);
1053      return tx_len;
1054  }
1055  
//Blocking TX worker shared by uart_write_bytes()/uart_write_bytes_with_break().
//Two paths:
//  - tx_buf_size > 0: push a uart_tx_data_t descriptor followed by the payload
//    (in half-ring-buffer-sized chunks) into the TX ring buffer; the ISR drains
//    it into the FIFO.
//  - tx_buf_size == 0: write the FIFO directly, blocking on tx_fifo_sem (given
//    by the ISR's TXFIFO_EMPTY handler) whenever the FIFO fills up.
//When brk_en is set, a break of brk_len is transmitted after the data.
//Returns the number of bytes accepted (== size), or 0 when size is 0.
static int uart_tx_all(uart_port_t uart_num, const char* src, size_t size, bool brk_en, int brk_len)
{
    if(size == 0) {
        return 0;
    }
    size_t original_size = size;

    //lock for uart_tx
    xSemaphoreTake(p_uart_obj[uart_num]->tx_mux, (portTickType)portMAX_DELAY);
    p_uart_obj[uart_num]->coll_det_flg = false;
    if(p_uart_obj[uart_num]->tx_buf_size > 0) {
        int max_size = xRingbufferGetMaxItemSize(p_uart_obj[uart_num]->tx_ring_buf);
        int offset = 0;
        //First ring-buffer item is a descriptor telling the ISR the total
        //payload size and whether a break follows.
        uart_tx_data_t evt;
        evt.tx_data.size = size;
        evt.tx_data.brk_len = brk_len;
        if(brk_en) {
            evt.type = UART_DATA_BREAK;
        } else {
            evt.type = UART_DATA;
        }
        xRingbufferSend(p_uart_obj[uart_num]->tx_ring_buf, (void*) &evt, sizeof(uart_tx_data_t), portMAX_DELAY);
        //Payload is chunked to at most half the ring buffer so a single send
        //can never exceed the largest item the ring buffer accepts.
        while(size > 0) {
            int send_size = size > max_size / 2 ? max_size / 2 : size;
            xRingbufferSend(p_uart_obj[uart_num]->tx_ring_buf, (void*) (src + offset), send_size, portMAX_DELAY);
            size -= send_size;
            offset += send_size;
            uart_enable_tx_intr(uart_num, 1, UART_EMPTY_THRESH_DEFAULT);
        }
    } else {
        while(size) {
            //semaphore for tx_fifo available
            if(pdTRUE == xSemaphoreTake(p_uart_obj[uart_num]->tx_fifo_sem, (portTickType)portMAX_DELAY)) {
                uint32_t sent = 0;
                if (UART_IS_MODE_SET(uart_num, UART_MODE_RS485_HALF_DUPLEX)) {
                    UART_ENTER_CRITICAL(&(uart_context[uart_num].spinlock));
                    uart_hal_set_rts(&(uart_context[uart_num].hal), 0);
                    uart_hal_ena_intr_mask(&(uart_context[uart_num].hal), UART_INTR_TX_DONE);
                    UART_EXIT_CRITICAL(&(uart_context[uart_num].spinlock));
                }
                uart_hal_write_txfifo(&(uart_context[uart_num].hal), (const uint8_t*)src, size, &sent);
                if(sent < size) {
                    //FIFO filled up before all bytes fit: have the ISR give
                    //tx_fifo_sem back once the FIFO drains below the threshold.
                    p_uart_obj[uart_num]->tx_waiting_fifo = true;
                    uart_enable_tx_intr(uart_num, 1, UART_EMPTY_THRESH_DEFAULT);
                }
                size -= sent;
                src += sent;
            }
        }
        if(brk_en) {
            //Queue the break and block until the ISR signals TX_BRK_DONE.
            uart_hal_clr_intsts_mask(&(uart_context[uart_num].hal), UART_INTR_TX_BRK_DONE);
            UART_ENTER_CRITICAL(&(uart_context[uart_num].spinlock));
            uart_hal_tx_break(&(uart_context[uart_num].hal), brk_len);
            uart_hal_ena_intr_mask(&(uart_context[uart_num].hal), UART_INTR_TX_BRK_DONE);
            UART_EXIT_CRITICAL(&(uart_context[uart_num].spinlock));
            xSemaphoreTake(p_uart_obj[uart_num]->tx_brk_sem, (portTickType)portMAX_DELAY);
        }
        xSemaphoreGive(p_uart_obj[uart_num]->tx_fifo_sem);
    }
    xSemaphoreGive(p_uart_obj[uart_num]->tx_mux);
    return original_size;
}
1118  
1119  int uart_write_bytes(uart_port_t uart_num, const void* src, size_t size)
1120  {
1121      UART_CHECK((uart_num < UART_NUM_MAX), "uart_num error", (-1));
1122      UART_CHECK((p_uart_obj[uart_num] != NULL), "uart driver error", (-1));
1123      UART_CHECK(src, "buffer null", (-1));
1124      return uart_tx_all(uart_num, src, size, 0, 0);
1125  }
1126  
1127  int uart_write_bytes_with_break(uart_port_t uart_num, const void* src, size_t size, int brk_len)
1128  {
1129      UART_CHECK((uart_num < UART_NUM_MAX), "uart_num error", (-1));
1130      UART_CHECK((p_uart_obj[uart_num]), "uart driver error", (-1));
1131      UART_CHECK((size > 0), "uart size error", (-1));
1132      UART_CHECK((src), "uart data null", (-1));
1133      UART_CHECK((brk_len > 0 && brk_len < 256), "break_num error", (-1));
1134      return uart_tx_all(uart_num, src, size, 1, brk_len);
1135  }
1136  
1137  static bool uart_check_buf_full(uart_port_t uart_num)
1138  {
1139      if(p_uart_obj[uart_num]->rx_buffer_full_flg) {
1140          BaseType_t res = xRingbufferSend(p_uart_obj[uart_num]->rx_ring_buf, p_uart_obj[uart_num]->rx_data_buf, p_uart_obj[uart_num]->rx_stash_len, 1);
1141          if(res == pdTRUE) {
1142              UART_ENTER_CRITICAL(&(uart_context[uart_num].spinlock));
1143              p_uart_obj[uart_num]->rx_buffered_len += p_uart_obj[uart_num]->rx_stash_len;
1144              p_uart_obj[uart_num]->rx_buffer_full_flg = false;
1145              UART_EXIT_CRITICAL(&(uart_context[uart_num].spinlock));
1146              uart_enable_rx_intr(p_uart_obj[uart_num]->uart_num);
1147              return true;
1148          }
1149      }
1150      return false;
1151  }
1152  
//Blocking read of up to `length` bytes from the RX ring buffer into `buf`.
//Keeps partially-consumed ring-buffer items across calls via rx_head_ptr /
//rx_ptr / rx_cur_remain; returns items once fully consumed. Returns the
//number of bytes copied (may be less than `length` on ring-buffer timeout),
//or -1 on bad arguments or failure to take rx_mux within ticks_to_wait.
int uart_read_bytes(uart_port_t uart_num, void* buf, uint32_t length, TickType_t ticks_to_wait)
{
    UART_CHECK((uart_num < UART_NUM_MAX), "uart_num error", (-1));
    UART_CHECK((buf), "uart data null", (-1));
    UART_CHECK((p_uart_obj[uart_num]), "uart driver error", (-1));
    uint8_t* data = NULL;
    size_t size;
    size_t copy_len = 0;
    int len_tmp;
    //rx_mux serializes all ring-buffer consumers.
    if(xSemaphoreTake(p_uart_obj[uart_num]->rx_mux,(portTickType)ticks_to_wait) != pdTRUE) {
        return -1;
    }
    while(length) {
        //No partially-consumed item left over: fetch the next one.
        if(p_uart_obj[uart_num]->rx_cur_remain == 0) {
            data = (uint8_t*) xRingbufferReceive(p_uart_obj[uart_num]->rx_ring_buf, &size, (portTickType) ticks_to_wait);
            if(data) {
                p_uart_obj[uart_num]->rx_head_ptr = data;
                p_uart_obj[uart_num]->rx_ptr = data;
                p_uart_obj[uart_num]->rx_cur_remain = size;
            } else {
                //When using dual cores, `rx_buffer_full_flg` may read and write on different cores at same time,
                //which may lose synchronization. So we also need to call `uart_check_buf_full` once when ringbuffer is empty
                //to solve the possible asynchronous issues.
                if(uart_check_buf_full(uart_num)) {
                    //This condition will never be true if `uart_read_bytes`
                    //and `uart_rx_intr_handler_default` are scheduled on the same core.
                    continue;
                } else {
                    //Ring buffer timed out and no stash pending: return what we have.
                    xSemaphoreGive(p_uart_obj[uart_num]->rx_mux);
                    return copy_len;
                }
            }
        }
        //Copy no more than the caller still wants from the current item.
        if(p_uart_obj[uart_num]->rx_cur_remain > length) {
            len_tmp = length;
        } else {
            len_tmp = p_uart_obj[uart_num]->rx_cur_remain;
        }
        memcpy((uint8_t *)buf + copy_len, p_uart_obj[uart_num]->rx_ptr, len_tmp);
        //Accounting shared with the ISR must be done under the spinlock.
        UART_ENTER_CRITICAL(&(uart_context[uart_num].spinlock));
        p_uart_obj[uart_num]->rx_buffered_len -= len_tmp;
        uart_pattern_queue_update(uart_num, len_tmp);
        p_uart_obj[uart_num]->rx_ptr += len_tmp;
        UART_EXIT_CRITICAL(&(uart_context[uart_num].spinlock));
        p_uart_obj[uart_num]->rx_cur_remain -= len_tmp;
        copy_len += len_tmp;
        length -= len_tmp;
        //Current item fully consumed: return it and retry any stashed data.
        if(p_uart_obj[uart_num]->rx_cur_remain == 0) {
            vRingbufferReturnItem(p_uart_obj[uart_num]->rx_ring_buf, p_uart_obj[uart_num]->rx_head_ptr);
            p_uart_obj[uart_num]->rx_head_ptr = NULL;
            p_uart_obj[uart_num]->rx_ptr = NULL;
            uart_check_buf_full(uart_num);
        }
    }

    xSemaphoreGive(p_uart_obj[uart_num]->rx_mux);
    return copy_len;
}
1211  
1212  esp_err_t uart_get_buffered_data_len(uart_port_t uart_num, size_t* size)
1213  {
1214      UART_CHECK((uart_num < UART_NUM_MAX), "uart_num error", ESP_FAIL);
1215      UART_CHECK((p_uart_obj[uart_num]), "uart driver error", ESP_FAIL);
1216      *size = p_uart_obj[uart_num]->rx_buffered_len;
1217      return ESP_OK;
1218  }
1219  
//uart_flush() is kept for backward compatibility; it is a linker alias that
//resolves to exactly the same code as uart_flush_input() below.
esp_err_t uart_flush(uart_port_t uart_num) __attribute__((alias("uart_flush_input")));
1221  
//Discard all data pending in the RX ring buffer and reset the hardware RX FIFO.
//Takes rx_mux so this cannot race with uart_read_bytes(); the RX interrupt is
//disabled for the duration so the ISR cannot refill the buffer while we drain it.
esp_err_t uart_flush_input(uart_port_t uart_num)
{
    UART_CHECK((uart_num < UART_NUM_MAX), "uart_num error", ESP_FAIL);
    UART_CHECK((p_uart_obj[uart_num]), "uart driver error", ESP_FAIL);
    uart_obj_t* p_uart = p_uart_obj[uart_num];
    uint8_t* data;
    size_t size;

    //rx sem protect the ring buffer read related functions
    xSemaphoreTake(p_uart->rx_mux, (portTickType)portMAX_DELAY);
    uart_disable_rx_intr(p_uart_obj[uart_num]->uart_num);
    while(true) {
        //First return the partially-consumed item left over from a previous
        //uart_read_bytes() call (tracked by rx_head_ptr/rx_cur_remain),
        //updating the buffered-length counter and pattern queue to match.
        if(p_uart->rx_head_ptr) {
            vRingbufferReturnItem(p_uart->rx_ring_buf, p_uart->rx_head_ptr);
            UART_ENTER_CRITICAL(&(uart_context[uart_num].spinlock));
            p_uart_obj[uart_num]->rx_buffered_len -= p_uart->rx_cur_remain;
            uart_pattern_queue_update(uart_num, p_uart->rx_cur_remain);
            UART_EXIT_CRITICAL(&(uart_context[uart_num].spinlock));
            p_uart->rx_ptr = NULL;
            p_uart->rx_cur_remain = 0;
            p_uart->rx_head_ptr = NULL;
        }
        //Non-blocking receive: drain one item per loop iteration.
        data = (uint8_t*) xRingbufferReceive(p_uart->rx_ring_buf, &size, (portTickType) 0);
        if(data == NULL) {
            //Ring buffer is empty, so the bookkeeping counter must be zero now;
            //if it is not, log the inconsistency and resynchronize it.
            if( p_uart_obj[uart_num]->rx_buffered_len != 0 ) {
                ESP_LOGE(UART_TAG, "rx_buffered_len error");
                p_uart_obj[uart_num]->rx_buffered_len = 0;
            }
            //We also need to clear the `rx_buffer_full_flg` here.
            UART_ENTER_CRITICAL(&(uart_context[uart_num].spinlock));
            p_uart_obj[uart_num]->rx_buffer_full_flg = false;
            UART_EXIT_CRITICAL(&(uart_context[uart_num].spinlock));
            break;
        }
        UART_ENTER_CRITICAL(&(uart_context[uart_num].spinlock));
        p_uart_obj[uart_num]->rx_buffered_len -= size;
        uart_pattern_queue_update(uart_num, size);
        UART_EXIT_CRITICAL(&(uart_context[uart_num].spinlock));
        vRingbufferReturnItem(p_uart->rx_ring_buf, data);
        //If the ISR had stashed data in rx_data_buf because the ring buffer was
        //full, push the stash in now that space is free; the next loop iteration
        //will drain and discard it as well.
        if(p_uart_obj[uart_num]->rx_buffer_full_flg) {
            BaseType_t res = xRingbufferSend(p_uart_obj[uart_num]->rx_ring_buf, p_uart_obj[uart_num]->rx_data_buf, p_uart_obj[uart_num]->rx_stash_len, 1);
            if(res == pdTRUE) {
                UART_ENTER_CRITICAL(&(uart_context[uart_num].spinlock));
                p_uart_obj[uart_num]->rx_buffered_len += p_uart_obj[uart_num]->rx_stash_len;
                p_uart_obj[uart_num]->rx_buffer_full_flg = false;
                UART_EXIT_CRITICAL(&(uart_context[uart_num].spinlock));
            }
        }
    }
    p_uart->rx_ptr = NULL;
    p_uart->rx_cur_remain = 0;
    p_uart->rx_head_ptr = NULL;
    //Drop any bytes still sitting in the hardware FIFO, then re-enable RX.
    uart_hal_rxfifo_rst(&(uart_context[uart_num].hal));
    uart_enable_rx_intr(p_uart_obj[uart_num]->uart_num);
    xSemaphoreGive(p_uart->rx_mux);
    return ESP_OK;
}
1279  
1280  esp_err_t uart_driver_install(uart_port_t uart_num, int rx_buffer_size, int tx_buffer_size, int queue_size, QueueHandle_t *uart_queue, int intr_alloc_flags)
1281  {
1282      esp_err_t r;
1283      UART_CHECK((uart_num < UART_NUM_MAX), "uart_num error", ESP_FAIL);
1284      UART_CHECK((rx_buffer_size > SOC_UART_FIFO_LEN), "uart rx buffer length error", ESP_FAIL);
1285      UART_CHECK((tx_buffer_size > SOC_UART_FIFO_LEN) || (tx_buffer_size == 0), "uart tx buffer length error", ESP_FAIL);
1286  #if CONFIG_UART_ISR_IN_IRAM
1287      if ((intr_alloc_flags & ESP_INTR_FLAG_IRAM) == 0) {
1288          ESP_LOGI(UART_TAG, "ESP_INTR_FLAG_IRAM flag not set while CONFIG_UART_ISR_IN_IRAM is enabled, flag updated");
1289          intr_alloc_flags |= ESP_INTR_FLAG_IRAM;
1290      }
1291  #else
1292      if ((intr_alloc_flags & ESP_INTR_FLAG_IRAM) != 0) {
1293          ESP_LOGW(UART_TAG, "ESP_INTR_FLAG_IRAM flag is set while CONFIG_UART_ISR_IN_IRAM is not enabled, flag updated");
1294          intr_alloc_flags &= ~ESP_INTR_FLAG_IRAM;
1295      }
1296  #endif
1297  
1298      if(p_uart_obj[uart_num] == NULL) {
1299          p_uart_obj[uart_num] = (uart_obj_t*) heap_caps_calloc(1, sizeof(uart_obj_t), MALLOC_CAP_INTERNAL|MALLOC_CAP_8BIT);
1300          if(p_uart_obj[uart_num] == NULL) {
1301              ESP_LOGE(UART_TAG, "UART driver malloc error");
1302              return ESP_FAIL;
1303          }
1304          p_uart_obj[uart_num]->uart_num = uart_num;
1305          p_uart_obj[uart_num]->uart_mode = UART_MODE_UART;
1306          p_uart_obj[uart_num]->coll_det_flg = false;
1307          p_uart_obj[uart_num]->rx_always_timeout_flg = false;
1308          p_uart_obj[uart_num]->tx_fifo_sem = xSemaphoreCreateBinary();
1309          xSemaphoreGive(p_uart_obj[uart_num]->tx_fifo_sem);
1310          p_uart_obj[uart_num]->tx_done_sem = xSemaphoreCreateBinary();
1311          p_uart_obj[uart_num]->tx_brk_sem = xSemaphoreCreateBinary();
1312          p_uart_obj[uart_num]->tx_mux = xSemaphoreCreateMutex();
1313          p_uart_obj[uart_num]->rx_mux = xSemaphoreCreateMutex();
1314          p_uart_obj[uart_num]->queue_size = queue_size;
1315          p_uart_obj[uart_num]->tx_ptr = NULL;
1316          p_uart_obj[uart_num]->tx_head = NULL;
1317          p_uart_obj[uart_num]->tx_len_tot = 0;
1318          p_uart_obj[uart_num]->tx_brk_flg = 0;
1319          p_uart_obj[uart_num]->tx_brk_len = 0;
1320          p_uart_obj[uart_num]->tx_waiting_brk = 0;
1321          p_uart_obj[uart_num]->rx_buffered_len = 0;
1322          uart_pattern_queue_reset(uart_num, UART_PATTERN_DET_QLEN_DEFAULT);
1323  
1324          if(uart_queue) {
1325              p_uart_obj[uart_num]->xQueueUart = xQueueCreate(queue_size, sizeof(uart_event_t));
1326              *uart_queue = p_uart_obj[uart_num]->xQueueUart;
1327              ESP_LOGI(UART_TAG, "queue free spaces: %d", uxQueueSpacesAvailable(p_uart_obj[uart_num]->xQueueUart));
1328          } else {
1329              p_uart_obj[uart_num]->xQueueUart = NULL;
1330          }
1331          p_uart_obj[uart_num]->rx_buffer_full_flg = false;
1332          p_uart_obj[uart_num]->tx_waiting_fifo = false;
1333          p_uart_obj[uart_num]->rx_ptr = NULL;
1334          p_uart_obj[uart_num]->rx_cur_remain = 0;
1335          p_uart_obj[uart_num]->rx_head_ptr = NULL;
1336          p_uart_obj[uart_num]->rx_ring_buf = xRingbufferCreate(rx_buffer_size, RINGBUF_TYPE_BYTEBUF);
1337          if(tx_buffer_size > 0) {
1338              p_uart_obj[uart_num]->tx_ring_buf = xRingbufferCreate(tx_buffer_size, RINGBUF_TYPE_NOSPLIT);
1339              p_uart_obj[uart_num]->tx_buf_size = tx_buffer_size;
1340          } else {
1341              p_uart_obj[uart_num]->tx_ring_buf = NULL;
1342              p_uart_obj[uart_num]->tx_buf_size = 0;
1343          }
1344          p_uart_obj[uart_num]->uart_select_notif_callback = NULL;
1345      } else {
1346          ESP_LOGE(UART_TAG, "UART driver already installed");
1347          return ESP_FAIL;
1348      }
1349  
1350      uart_intr_config_t uart_intr = {
1351          .intr_enable_mask = UART_INTR_CONFIG_FLAG,
1352          .rxfifo_full_thresh = UART_FULL_THRESH_DEFAULT,
1353          .rx_timeout_thresh = UART_TOUT_THRESH_DEFAULT,
1354          .txfifo_empty_intr_thresh = UART_EMPTY_THRESH_DEFAULT,
1355      };
1356      uart_module_enable(uart_num);
1357      uart_hal_disable_intr_mask(&(uart_context[uart_num].hal), UART_LL_INTR_MASK);
1358      uart_hal_clr_intsts_mask(&(uart_context[uart_num].hal), UART_LL_INTR_MASK);
1359      r=uart_isr_register(uart_num, uart_rx_intr_handler_default, p_uart_obj[uart_num], intr_alloc_flags, &p_uart_obj[uart_num]->intr_handle);
1360      if (r!=ESP_OK) goto err;
1361      r=uart_intr_config(uart_num, &uart_intr);
1362      if (r!=ESP_OK) goto err;
1363      return r;
1364  
1365  err:
1366      uart_driver_delete(uart_num);
1367      return r;
1368  }
1369  
1370  //Make sure no other tasks are still using UART before you call this function
1371  esp_err_t uart_driver_delete(uart_port_t uart_num)
1372  {
1373      UART_CHECK((uart_num < UART_NUM_MAX), "uart_num error", ESP_FAIL);
1374      if(p_uart_obj[uart_num] == NULL) {
1375          ESP_LOGI(UART_TAG, "ALREADY NULL");
1376          return ESP_OK;
1377      }
1378      esp_intr_free(p_uart_obj[uart_num]->intr_handle);
1379      uart_disable_rx_intr(uart_num);
1380      uart_disable_tx_intr(uart_num);
1381      uart_pattern_link_free(uart_num);
1382  
1383      if(p_uart_obj[uart_num]->tx_fifo_sem) {
1384          vSemaphoreDelete(p_uart_obj[uart_num]->tx_fifo_sem);
1385          p_uart_obj[uart_num]->tx_fifo_sem = NULL;
1386      }
1387      if(p_uart_obj[uart_num]->tx_done_sem) {
1388          vSemaphoreDelete(p_uart_obj[uart_num]->tx_done_sem);
1389          p_uart_obj[uart_num]->tx_done_sem = NULL;
1390      }
1391      if(p_uart_obj[uart_num]->tx_brk_sem) {
1392          vSemaphoreDelete(p_uart_obj[uart_num]->tx_brk_sem);
1393          p_uart_obj[uart_num]->tx_brk_sem = NULL;
1394      }
1395      if(p_uart_obj[uart_num]->tx_mux) {
1396          vSemaphoreDelete(p_uart_obj[uart_num]->tx_mux);
1397          p_uart_obj[uart_num]->tx_mux = NULL;
1398      }
1399      if(p_uart_obj[uart_num]->rx_mux) {
1400          vSemaphoreDelete(p_uart_obj[uart_num]->rx_mux);
1401          p_uart_obj[uart_num]->rx_mux = NULL;
1402      }
1403      if(p_uart_obj[uart_num]->xQueueUart) {
1404          vQueueDelete(p_uart_obj[uart_num]->xQueueUart);
1405          p_uart_obj[uart_num]->xQueueUart = NULL;
1406      }
1407      if(p_uart_obj[uart_num]->rx_ring_buf) {
1408          vRingbufferDelete(p_uart_obj[uart_num]->rx_ring_buf);
1409          p_uart_obj[uart_num]->rx_ring_buf = NULL;
1410      }
1411      if(p_uart_obj[uart_num]->tx_ring_buf) {
1412          vRingbufferDelete(p_uart_obj[uart_num]->tx_ring_buf);
1413          p_uart_obj[uart_num]->tx_ring_buf = NULL;
1414      }
1415  
1416      heap_caps_free(p_uart_obj[uart_num]);
1417      p_uart_obj[uart_num] = NULL;
1418  
1419      uart_module_disable(uart_num);
1420      return ESP_OK;
1421  }
1422  
1423  bool uart_is_driver_installed(uart_port_t uart_num)
1424  {
1425      return uart_num < UART_NUM_MAX && (p_uart_obj[uart_num] != NULL);
1426  }
1427  
1428  void uart_set_select_notif_callback(uart_port_t uart_num, uart_select_notif_callback_t uart_select_notif_callback)
1429  {
1430      if (uart_num < UART_NUM_MAX && p_uart_obj[uart_num]) {
1431          p_uart_obj[uart_num]->uart_select_notif_callback = (uart_select_notif_callback_t) uart_select_notif_callback;
1432      }
1433  }
1434  
1435  portMUX_TYPE *uart_get_selectlock(void)
1436  {
1437      return &uart_selectlock;
1438  }
1439  
1440  // Set UART mode
1441  esp_err_t uart_set_mode(uart_port_t uart_num, uart_mode_t mode)
1442  {
1443      UART_CHECK((uart_num < UART_NUM_MAX), "uart_num error", ESP_ERR_INVALID_ARG);
1444      UART_CHECK((p_uart_obj[uart_num]), "uart driver error", ESP_ERR_INVALID_STATE);
1445      if ((mode == UART_MODE_RS485_COLLISION_DETECT) || (mode == UART_MODE_RS485_APP_CTRL)
1446              || (mode == UART_MODE_RS485_HALF_DUPLEX)) {
1447          UART_CHECK((!uart_hal_is_hw_rts_en(&(uart_context[uart_num].hal))),
1448                  "disable hw flowctrl before using RS485 mode", ESP_ERR_INVALID_ARG);
1449      }
1450      UART_ENTER_CRITICAL(&(uart_context[uart_num].spinlock));
1451      uart_hal_set_mode(&(uart_context[uart_num].hal), mode);
1452      if(mode ==  UART_MODE_RS485_COLLISION_DETECT) {
1453          // This mode allows read while transmitting that allows collision detection
1454          p_uart_obj[uart_num]->coll_det_flg = false;
1455          // Enable collision detection interrupts
1456          uart_hal_ena_intr_mask(&(uart_context[uart_num].hal), UART_INTR_RXFIFO_TOUT
1457                                          | UART_INTR_RXFIFO_FULL
1458                                          | UART_INTR_RS485_CLASH
1459                                          | UART_INTR_RS485_FRM_ERR
1460                                          | UART_INTR_RS485_PARITY_ERR);
1461      }
1462      p_uart_obj[uart_num]->uart_mode = mode;
1463      UART_EXIT_CRITICAL(&(uart_context[uart_num].spinlock));
1464      return ESP_OK;
1465  }
1466  
1467  esp_err_t uart_set_rx_full_threshold(uart_port_t uart_num, int threshold)
1468  {
1469      UART_CHECK((uart_num < UART_NUM_MAX), "uart_num error", ESP_ERR_INVALID_ARG);
1470      UART_CHECK((threshold < UART_RXFIFO_FULL_THRHD_V) && (threshold > 0),
1471          "rx fifo full threshold value error", ESP_ERR_INVALID_ARG);
1472      if (p_uart_obj[uart_num] == NULL) {
1473          ESP_LOGE(UART_TAG, "call uart_driver_install API first");
1474          return ESP_ERR_INVALID_STATE;
1475      }
1476      UART_ENTER_CRITICAL(&(uart_context[uart_num].spinlock));
1477      if (uart_hal_get_intr_ena_status(&(uart_context[uart_num].hal)) & UART_INTR_RXFIFO_FULL) {
1478          uart_hal_set_rxfifo_full_thr(&(uart_context[uart_num].hal), threshold);
1479      }
1480      UART_EXIT_CRITICAL(&(uart_context[uart_num].spinlock));
1481      return ESP_OK;
1482  }
1483  
1484  esp_err_t uart_set_tx_empty_threshold(uart_port_t uart_num, int threshold)
1485  {
1486      UART_CHECK((uart_num < UART_NUM_MAX), "uart_num error", ESP_ERR_INVALID_ARG);
1487      UART_CHECK((threshold < UART_TXFIFO_EMPTY_THRHD_V) && (threshold > 0),
1488          "tx fifo empty threshold value error", ESP_ERR_INVALID_ARG);
1489      if (p_uart_obj[uart_num] == NULL) {
1490          ESP_LOGE(UART_TAG, "call uart_driver_install API first");
1491          return ESP_ERR_INVALID_STATE;
1492      }
1493      UART_ENTER_CRITICAL(&(uart_context[uart_num].spinlock));
1494      if (uart_hal_get_intr_ena_status(&(uart_context[uart_num].hal)) & UART_INTR_TXFIFO_EMPTY) {
1495          uart_hal_set_txfifo_empty_thr(&(uart_context[uart_num].hal), threshold);
1496      }
1497      UART_EXIT_CRITICAL(&(uart_context[uart_num].spinlock));
1498      return ESP_OK;
1499  }
1500  
1501  esp_err_t uart_set_rx_timeout(uart_port_t uart_num, const uint8_t tout_thresh)
1502  {
1503      UART_CHECK((uart_num < UART_NUM_MAX), "uart_num error", ESP_ERR_INVALID_ARG);
1504      // get maximum timeout threshold
1505      uint16_t tout_max_thresh = uart_hal_get_max_rx_timeout_thrd(&(uart_context[uart_num].hal));
1506      if (tout_thresh > tout_max_thresh) {
1507          ESP_LOGE(UART_TAG, "tout_thresh = %d > maximum value = %d", tout_thresh, tout_max_thresh);
1508          return ESP_ERR_INVALID_ARG;
1509      }
1510      UART_ENTER_CRITICAL(&(uart_context[uart_num].spinlock));
1511      uart_hal_set_rx_timeout(&(uart_context[uart_num].hal), tout_thresh);
1512      UART_EXIT_CRITICAL(&(uart_context[uart_num].spinlock));
1513      return ESP_OK;
1514  }
1515  
1516  esp_err_t uart_get_collision_flag(uart_port_t uart_num, bool* collision_flag)
1517  {
1518      UART_CHECK((uart_num < UART_NUM_MAX), "uart_num error", ESP_ERR_INVALID_ARG);
1519      UART_CHECK((p_uart_obj[uart_num]), "uart driver error", ESP_FAIL);
1520      UART_CHECK((collision_flag != NULL), "wrong parameter pointer", ESP_ERR_INVALID_ARG);
1521      UART_CHECK((UART_IS_MODE_SET(uart_num, UART_MODE_RS485_HALF_DUPLEX)
1522                      || UART_IS_MODE_SET(uart_num, UART_MODE_RS485_COLLISION_DETECT)),
1523                      "wrong mode", ESP_ERR_INVALID_ARG);
1524      *collision_flag = p_uart_obj[uart_num]->coll_det_flg;
1525      return ESP_OK;
1526  }
1527  
1528  esp_err_t uart_set_wakeup_threshold(uart_port_t uart_num, int wakeup_threshold)
1529  {
1530      UART_CHECK((uart_num < UART_NUM_MAX), "uart_num error", ESP_ERR_INVALID_ARG);
1531      UART_CHECK((wakeup_threshold <= UART_ACTIVE_THRESHOLD_V &&
1532                  wakeup_threshold > UART_MIN_WAKEUP_THRESH),
1533                  "wakeup_threshold out of bounds", ESP_ERR_INVALID_ARG);
1534      UART_ENTER_CRITICAL(&(uart_context[uart_num].spinlock));
1535      uart_hal_set_wakeup_thrd(&(uart_context[uart_num].hal), wakeup_threshold);
1536      UART_EXIT_CRITICAL(&(uart_context[uart_num].spinlock));
1537      return ESP_OK;
1538  }
1539  
1540  esp_err_t uart_get_wakeup_threshold(uart_port_t uart_num, int* out_wakeup_threshold)
1541  {
1542      UART_CHECK((uart_num < UART_NUM_MAX), "uart_num error", ESP_ERR_INVALID_ARG);
1543      UART_CHECK((out_wakeup_threshold != NULL), "argument is NULL", ESP_ERR_INVALID_ARG);
1544      uart_hal_get_wakeup_thrd(&(uart_context[uart_num].hal), (uint32_t *)out_wakeup_threshold);
1545      return ESP_OK;
1546  }
1547  
1548  esp_err_t uart_wait_tx_idle_polling(uart_port_t uart_num)
1549  {
1550      UART_CHECK((uart_num < UART_NUM_MAX), "uart_num error", ESP_ERR_INVALID_ARG);
1551      while(!uart_hal_is_tx_idle(&(uart_context[uart_num].hal)));
1552      return ESP_OK;
1553  }
1554  
1555  esp_err_t uart_set_loop_back(uart_port_t uart_num, bool loop_back_en)
1556  {
1557      UART_CHECK((uart_num < UART_NUM_MAX), "uart_num error", ESP_ERR_INVALID_ARG);
1558      uart_hal_set_loop_back(&(uart_context[uart_num].hal), loop_back_en);
1559      return ESP_OK;
1560  }
1561  
1562  void uart_set_always_rx_timeout(uart_port_t uart_num, bool always_rx_timeout)
1563  {
1564      uint16_t rx_tout = uart_hal_get_rx_tout_thr(&(uart_context[uart_num].hal));
1565      if (rx_tout) {
1566          p_uart_obj[uart_num]->rx_always_timeout_flg = always_rx_timeout;
1567      } else {
1568          p_uart_obj[uart_num]->rx_always_timeout_flg = false;
1569      }
1570  }