[PULL,10/88] esp.c: move buffer and TC logic into separate to/from device paths in esp_do_dma()

Message ID 20240213194052.1162753-11-mark.cave-ayland@ilande.co.uk (mailing list archive)
State New, archived
Series [PULL,01/88] esp.c: don't clear cmdfifo when esp_select() fails in get_cmd()

Commit Message

Mark Cave-Ayland Feb. 13, 2024, 7:39 p.m. UTC
The ultimate aim is for esp_do_dma() behaviour to be determined by the SCSI
bus phase, in which case it is necessary to have separate to/from device paths.
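
For orientation, below is a minimal, self-contained sketch of the restructured
control flow this patch introduces. The EspSketch type and the helper names are
simplified stand-ins for illustration only, not the real ESPState API; the real
code also manages the DRQ line, the FIFO and the PDMA callback, and calls into
the SCSI layer via scsi_req_continue().

    /* Sketch only: each transfer direction now owns its TC/buffer
     * bookkeeping instead of sharing one tail path keyed on direction. */
    #include <stdint.h>
    #include <stdio.h>

    typedef struct {
        uint32_t tc;        /* transfer counter */
        uint8_t *async_buf; /* current position in the SCSI layer buffer */
        int async_len;      /* bytes left in the SCSI layer buffer */
        int ti_size;        /* bytes accounted to the current transfer */
    } EspSketch;

    static void sketch_dma_to_device(EspSketch *s, int len)
    {
        /* ... dma_memory_read() of len bytes would happen here ... */
        s->tc -= len;
        s->async_buf += len;
        s->async_len -= len;
        s->ti_size += len;              /* data flows towards the device */

        if (s->async_len == 0) {
            /* buffer consumed: hand it back to the SCSI layer and defer */
            printf("to-device: continue SCSI request\n");
            return;
        }
        /* partially filled a SCSI buffer: complete the DMA op now */
        printf("to-device: complete DMA immediately\n");
    }

    static void sketch_dma_from_device(EspSketch *s, int len)
    {
        /* ... dma_memory_write() of len bytes would happen here ... */
        s->tc -= len;
        s->async_buf += len;
        s->async_len -= len;
        s->ti_size -= len;              /* data flows away from the device */

        if (s->async_len == 0) {
            printf("from-device: continue SCSI request\n");
            if (s->tc != 0 || s->ti_size == 0) {
                return;                 /* defer until the SCSI layer completes */
            }
        }
        printf("from-device: complete DMA immediately\n");
    }

    int main(void)
    {
        uint8_t buf[16] = { 0 };
        EspSketch s = { .tc = 16, .async_buf = buf, .async_len = 16 };

        sketch_dma_to_device(&s, 8);    /* partial fill: completes DMA */
        sketch_dma_to_device(&s, 8);    /* buffer now full: defers to SCSI layer */
        sketch_dma_from_device(&s, 0);  /* placeholder call for the other path */
        return 0;
    }

With the two paths split, the to-device branch can later be driven purely by
the SCSI bus phase without carrying the from-device completion conditions.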

Signed-off-by: Mark Cave-Ayland <mark.cave-ayland@ilande.co.uk>
Tested-by: Helge Deller <deller@gmx.de>
Tested-by: Thomas Huth <thuth@redhat.com>
Message-Id: <20240112125420.514425-11-mark.cave-ayland@ilande.co.uk>
Signed-off-by: Mark Cave-Ayland <mark.cave-ayland@ilande.co.uk>
---
 hw/scsi/esp.c | 65 +++++++++++++++++++++++++++++++--------------------
 1 file changed, 40 insertions(+), 25 deletions(-)

Patch

diff --git a/hw/scsi/esp.c b/hw/scsi/esp.c
index 63c828c1b2..9893840255 100644
--- a/hw/scsi/esp.c
+++ b/hw/scsi/esp.c
@@ -678,14 +678,53 @@  static void esp_do_dma(ESPState *s)
     if (to_device) {
         if (s->dma_memory_read) {
             s->dma_memory_read(s->dma_opaque, s->async_buf, len);
+
+            esp_set_tc(s, esp_get_tc(s) - len);
+            s->async_buf += len;
+            s->async_len -= len;
+            s->ti_size += len;
+
+            if (s->async_len == 0) {
+                scsi_req_continue(s->current_req);
+                /*
+                 * If there is still data to be read from the device then
+                 * complete the DMA operation immediately.  Otherwise defer
+                 * until the scsi layer has completed.
+                 */
+                return;
+            }
+
+            /* Partially filled a scsi buffer. Complete immediately.  */
+            esp_dma_done(s);
+            esp_lower_drq(s);
         } else {
             esp_set_pdma_cb(s, DO_DMA_PDMA_CB);
             esp_raise_drq(s);
-            return;
         }
     } else {
         if (s->dma_memory_write) {
             s->dma_memory_write(s->dma_opaque, s->async_buf, len);
+
+            esp_set_tc(s, esp_get_tc(s) - len);
+            s->async_buf += len;
+            s->async_len -= len;
+            s->ti_size -= len;
+
+            if (s->async_len == 0) {
+                scsi_req_continue(s->current_req);
+                /*
+                 * If there is still data to be read from the device then
+                 * complete the DMA operation immediately.  Otherwise defer
+                 * until the scsi layer has completed.
+                 */
+                if (esp_get_tc(s) != 0 || s->ti_size == 0) {
+                    return;
+                }
+            }
+
+            /* Partially filled a scsi buffer. Complete immediately.  */
+            esp_dma_done(s);
+            esp_lower_drq(s);
         } else {
             /* Adjust TC for any leftover data in the FIFO */
             if (!fifo8_is_empty(&s->fifo)) {
@@ -713,32 +752,8 @@  static void esp_do_dma(ESPState *s)
             esp_set_tc(s, esp_get_tc(s) - len);
             esp_set_pdma_cb(s, DO_DMA_PDMA_CB);
             esp_raise_drq(s);
-            return;
         }
     }
-    esp_set_tc(s, esp_get_tc(s) - len);
-    s->async_buf += len;
-    s->async_len -= len;
-    if (to_device) {
-        s->ti_size += len;
-    } else {
-        s->ti_size -= len;
-    }
-    if (s->async_len == 0) {
-        scsi_req_continue(s->current_req);
-        /*
-         * If there is still data to be read from the device then
-         * complete the DMA operation immediately.  Otherwise defer
-         * until the scsi layer has completed.
-         */
-        if (to_device || esp_get_tc(s) != 0 || s->ti_size == 0) {
-            return;
-        }
-    }
-
-    /* Partially filled a scsi buffer. Complete immediately.  */
-    esp_dma_done(s);
-    esp_lower_drq(s);
 }
 
 static void esp_do_nodma(ESPState *s)