From 1bcfba305bf8118268080e5b4bf762460d95d8f7 Mon Sep 17 00:00:00 2001 From: drbh Date: Thu, 5 Dec 2024 10:58:37 -0500 Subject: [PATCH 1/5] feat: tokenize each request individually and increase warmup image size --- backends/client/src/lib.rs | 2 +- backends/v2/src/client/mod.rs | 2 +- backends/v3/src/client/mod.rs | 2 +- .../models/test_flash_qwen2_vl_warmup.py | 38 +++++++++++++++++++ .../models/custom_modeling/qwen2_vl.py | 1 + .../models/vlm_causal_lm.py | 8 ---- 6 files changed, 42 insertions(+), 11 deletions(-) create mode 100644 integration-tests/models/test_flash_qwen2_vl_warmup.py diff --git a/backends/client/src/lib.rs b/backends/client/src/lib.rs index 45bee10ca50..e33d23480e5 100644 --- a/backends/client/src/lib.rs +++ b/backends/client/src/lib.rs @@ -86,6 +86,6 @@ impl ChunksToString for Vec { } } -static WARMUP_IMAGE_BASE64 :&str = "iVBORw0KGgoAAAANSUhEUgAAABQAAAAUCAIAAAAC64paAAABg2lDQ1BJQ0MgcHJvZmlsZQAAKJF9kT1Iw0AcxV/TSotUROxQxCFDdbKLijjWKhShQqgVWnUwufQLmrQkKS6OgmvBwY/FqoOLs64OroIg+AHi7OCk6CIl/i8ptIjx4Lgf7+497t4BQqvKNDOQADTdMjKppJjLr4rBVwQQwhAERGVm1uckKQ3P8XUPH1/v4jzL+9yfY0AtmAzwicQJVjcs4g3imU2rznmfOMLKskp8Tjxh0AWJH7muuPzGueSwwDMjRjYzTxwhFks9rPQwKxsa8TRxTNV0yhdyLquctzhr1Qbr3JO/MFzQV5a5TnMUKSxiCRJEKGiggiosxGnVSTGRof2kh3/E8UvkUshVASPHAmrQIDt+8D/43a1ZnJp0k8JJoO/Ftj/GgOAu0G7a9vexbbdPAP8zcKV3/bUWMPtJerOrxY6AwW3g4rqrKXvA5Q4QfarLhuxIfppCsQi8n9E35YHhW6B/ze2ts4/TByBLXaVvgINDYLxE2ese7w719vbvmU5/PycecohsjayNAAAACXBIWXMAAC4jAAAuIwF4pT92AAAAB3RJTUUH6AQIEQMnlTSSjwAAABl0RVh0Q29tbWVudABDcmVhdGVkIHdpdGggR0lNUFeBDhcAAAASSURBVDjLY2AYBaNgFIyCoQsABMQAAeRw1DoAAAAASUVORK5CYII="; +static WARMUP_IMAGE_BASE64 : &str = "iVBORw0KGgoAAAANSUhEUgAAACgAAAAoCAAAAACpleexAAAGc0lEQVR4nAFoBpf5AINHnT9oHGHwxejPBqNS161/mUe+CNEM2ZjIb1zZ+/ygXl5vkP9T6lgA+jwpw1IgNJCtWJkY7CNQhfuiZBrm+8cUVPW10g4TQIkAS4kJ+6A5qQEfgO5HoId0MVcs4gEcIILe/jMpXbwGCgORcpeGMh0ANY9Kk2EU3Wh6BMR39APvsV0KW8yzEbc5e8JtBWUhs4tHYZ4XhUg5RUVvh9Tl4/FuaOMEgANG4gnASCYBPa1f5p/+uFPqsTWWEqrrQfbI4HsbRHF0smMiTwTKvQJg0zxGRc8muP0ZnqI1cvgtySZP25FY606dWen/hyiwFpcHuy+6xCKCAszDRskWmbjzRyaAb3I5fR+GeB66N5AnNQJ2GCwhYsQJifZBCw7hsxQAcSxA0TMqUCwvxxa+WbWItKBdDUFuBj6TkgcC9letT2IKSPHMr0tLxwFK76OfHulksUmKDPKS+BgDhbKv88I6GqwdMxapuKaqXh04MKP/xuCOAIR2TZzfBY/vtEJO5hSCjtcVqGA+1VQAOocwBA41Vil7/xK32enA6cUCLM7864NqqADVS45Fc6M2/NjrxCcjwL+2tHI3Bs/sOtf3hjETiP0MGQJI/LP+3Cx9SGzwgF/gtNBJLROxfUkaX+Y3AvmMd/IEuQm0EWYv7c8TAGSNTNdgALrJdPUqCo7+B4lGDX+0mfiIVcUS/bQyRvKdBIxydsNe1fEAHhN2FlGUa1YlbdgQeLRW/rjMZc9YqsnGCIZE4krYv80pEVL+bUzMEAQ8HvAODK/TBsK7ba1Sn0HBh6IwLTVEb6idIwi2jKDfpyQx/U+obVDMBFbaVWPV/JkSWtqUqbon7XYOd8wCNJQWDglmxavw51+FRT1u/cwKKHwAM3cdszRLianvQ8QM3Dp+3iEpvT79x6Zcql+SGRHLAd+BUQAnve2eKwRkk7DB5P9iN/LIlXMgxzdcDii2nmYjMhyB3yVbP6bXHEtIo/WYsP+dBEuAtOQBeBgIpwHaFgYwVCEQ6pGU0lZC0iPh86uzzpPWe/I8q2Dc4CEA6Pnz6QnU1ujnzYsuJ0EptCUWTfcSxccu5tCSHH0jTQgrG6/nDlsrMAQCOj1FyhHoXS9GUWxwU/sV8AFHduFF6okKq5vlX4lRvj1+CNMcHOlnAF2Yui1hfSwSXt8MjPstlEj5TPf7li7JBRKvSjDgAaVcm4EFLmxMLmcCTyE/LivPSEOexyzKp8O/MIPLAiQ/4onPEb2s8ESAQBILVhvlqSB1rwCGH1xEePawEple0P9023HYGrvujCDUMlSdgpB/71rfEfTjgY8uHFgPAPdla93mIt7/uWNathN5EUaD8QOzOcQ541UomYbKERPqRjqSBA7PK0sCz8zpLAeMNVI9+Rx/eQeo5A7FeI7wMQuyYCAaxx9u3GfRqMSXUqTwsQAlQ6dAnhSiTQqzy0ivVCrFhIoWciWl/8+6meceIuJ6rvRqJ/GswyeSBP5AMMelxb2MN7XoaXqL9HEvIC1FUhTrHk5xvA1GSOlTSvOHnvxb5c8Behci1Cburu3pF201hAMQNLcjHgmwnnpZFoPvBKpCDLxXVadIjdO4bwLMOtWuExNtJ3x0qJ1yMWIPuZOL20FJNGXcO2f51co5uhjTk52RSDDEAryv3E7i0xHi7eq3Mh/5u1/Vitqez8PoMq3b5/BVpCFvII4YrYqfi5EBaCSvfvWJ4L3E7/h4Fmk+EbAV4ZzqyAZjeNEa+FbDSplFljz49sIczQJBBHrd0OaqYGLf+nKY7SL6EWG1aBBuPayoNRNxflhTdPYLoz4N5EDVAgwNlJXD3/gcnpE9UizPIauZNhP/1rcnE7gNOdwJ4dyZZFQcJSL
WH1kC5xOZ9ls7GIUv4BctR2o73VVy0zICLkRku+34Y/6YXywzG2t4adZW5QK7WqesdSGuIXXaws/lLujr2ujEgzRFdt9p/gyGFUgsY8YH2x3pGCEOAu5G62sOGAbM5vK88t8zqfDdNCPweZhVJ0cHNw5vC3Lims1435Q+DXwE+K7yjXFolURnHhsUUEoUV/u+9kJD0kPUf8vCjB/3IlJNjcrvEuu+NwKVe83Xd9r6ltsqgv614BHA9QOqO9Itnu7PiQPNzWqrV9r269A7rW1rAUmrqu5+w8XzfuT3s6wszZH5xPXovR85dhN0TXjzBo2PIhc38srv5yopvBBpp115NAAAAABJRU5ErkJggg=="; pub type Result = std::result::Result; diff --git a/backends/v2/src/client/mod.rs b/backends/v2/src/client/mod.rs index fa9d440645d..b463cc98a26 100644 --- a/backends/v2/src/client/mod.rs +++ b/backends/v2/src/client/mod.rs @@ -63,6 +63,6 @@ impl From for ClientError { } } -static WARMUP_IMAGE_BASE64 :&str = "iVBORw0KGgoAAAANSUhEUgAAABQAAAAUCAIAAAAC64paAAABg2lDQ1BJQ0MgcHJvZmlsZQAAKJF9kT1Iw0AcxV/TSotUROxQxCFDdbKLijjWKhShQqgVWnUwufQLmrQkKS6OgmvBwY/FqoOLs64OroIg+AHi7OCk6CIl/i8ptIjx4Lgf7+497t4BQqvKNDOQADTdMjKppJjLr4rBVwQQwhAERGVm1uckKQ3P8XUPH1/v4jzL+9yfY0AtmAzwicQJVjcs4g3imU2rznmfOMLKskp8Tjxh0AWJH7muuPzGueSwwDMjRjYzTxwhFks9rPQwKxsa8TRxTNV0yhdyLquctzhr1Qbr3JO/MFzQV5a5TnMUKSxiCRJEKGiggiosxGnVSTGRof2kh3/E8UvkUshVASPHAmrQIDt+8D/43a1ZnJp0k8JJoO/Ftj/GgOAu0G7a9vexbbdPAP8zcKV3/bUWMPtJerOrxY6AwW3g4rqrKXvA5Q4QfarLhuxIfppCsQi8n9E35YHhW6B/ze2ts4/TByBLXaVvgINDYLxE2ese7w719vbvmU5/PycecohsjayNAAAACXBIWXMAAC4jAAAuIwF4pT92AAAAB3RJTUUH6AQIEQMnlTSSjwAAABl0RVh0Q29tbWVudABDcmVhdGVkIHdpdGggR0lNUFeBDhcAAAASSURBVDjLY2AYBaNgFIyCoQsABMQAAeRw1DoAAAAASUVORK5CYII="; +static WARMUP_IMAGE_BASE64 : &str = "iVBORw0KGgoAAAANSUhEUgAAACgAAAAoCAAAAACpleexAAAGc0lEQVR4nAFoBpf5AINHnT9oHGHwxejPBqNS161/mUe+CNEM2ZjIb1zZ+/ygXl5vkP9T6lgA+jwpw1IgNJCtWJkY7CNQhfuiZBrm+8cUVPW10g4TQIkAS4kJ+6A5qQEfgO5HoId0MVcs4gEcIILe/jMpXbwGCgORcpeGMh0ANY9Kk2EU3Wh6BMR39APvsV0KW8yzEbc5e8JtBWUhs4tHYZ4XhUg5RUVvh9Tl4/FuaOMEgANG4gnASCYBPa1f5p/+uFPqsTWWEqrrQfbI4HsbRHF0smMiTwTKvQJg0zxGRc8muP0ZnqI1cvgtySZP25FY606dWen/hyiwFpcHuy+6xCKCAszDRskWmbjzRyaAb3I5fR+GeB66N5AnNQJ2GCwhYsQJifZBCw7hsxQAcSxA0TMqUCwvxxa+WbWItKBdDUFuBj6TkgcC9letT2IKSPHMr0tLxwFK76OfHulksUmKDPKS+BgDhbKv88I6GqwdMxapuKaqXh04MKP/xuCOAIR2TZzfBY/vtEJO5hSCjtcVqGA+1VQAOocwBA41Vil7/xK32enA6cUCLM7864NqqADVS45Fc6M2/NjrxCcjwL+2tHI3Bs/sOtf3hjETiP0MGQJI/LP+3Cx9SGzwgF/gtNBJLROxfUkaX+Y3AvmMd/IEuQm0EWYv7c8TAGSNTNdgALrJdPUqCo7+B4lGDX+0mfiIVcUS/bQyRvKdBIxydsNe1fEAHhN2FlGUa1YlbdgQeLRW/rjMZc9YqsnGCIZE4krYv80pEVL+bUzMEAQ8HvAODK/TBsK7ba1Sn0HBh6IwLTVEb6idIwi2jKDfpyQx/U+obVDMBFbaVWPV/JkSWtqUqbon7XYOd8wCNJQWDglmxavw51+FRT1u/cwKKHwAM3cdszRLianvQ8QM3Dp+3iEpvT79x6Zcql+SGRHLAd+BUQAnve2eKwRkk7DB5P9iN/LIlXMgxzdcDii2nmYjMhyB3yVbP6bXHEtIo/WYsP+dBEuAtOQBeBgIpwHaFgYwVCEQ6pGU0lZC0iPh86uzzpPWe/I8q2Dc4CEA6Pnz6QnU1ujnzYsuJ0EptCUWTfcSxccu5tCSHH0jTQgrG6/nDlsrMAQCOj1FyhHoXS9GUWxwU/sV8AFHduFF6okKq5vlX4lRvj1+CNMcHOlnAF2Yui1hfSwSXt8MjPstlEj5TPf7li7JBRKvSjDgAaVcm4EFLmxMLmcCTyE/LivPSEOexyzKp8O/MIPLAiQ/4onPEb2s8ESAQBILVhvlqSB1rwCGH1xEePawEple0P9023HYGrvujCDUMlSdgpB/71rfEfTjgY8uHFgPAPdla93mIt7/uWNathN5EUaD8QOzOcQ541UomYbKERPqRjqSBA7PK0sCz8zpLAeMNVI9+Rx/eQeo5A7FeI7wMQuyYCAaxx9u3GfRqMSXUqTwsQAlQ6dAnhSiTQqzy0ivVCrFhIoWciWl/8+6meceIuJ6rvRqJ/GswyeSBP5AMMelxb2MN7XoaXqL9HEvIC1FUhTrHk5xvA1GSOlTSvOHnvxb5c8Behci1Cburu3pF201hAMQNLcjHgmwnnpZFoPvBKpCDLxXVadIjdO4bwLMOtWuExNtJ3x0qJ1yMWIPuZOL20FJNGXcO2f51co5uhjTk52RSDDEAryv3E7i0xHi7eq3Mh/5u1/Vitqez8PoMq3b5/BVpCFvII4YrYqfi5EBaCSvfvWJ4L3E7/h4Fmk+EbAV4ZzqyAZjeNEa+FbDSplFljz49sIczQJBBHrd0OaqYGLf+nKY7SL6EWG1aBBuPayoNRNxflhTdPYLoz4N5EDVAgwNlJXD3/gcnpE9UizPIauZNhP/1rcnE7gNOdwJ4dyZZFQcJSLWH1kC5xOZ9ls7GIUv4BctR2o73VVy0zICLkRku+34Y/6YXywzG2t4adZW5QK7WqesdSGuIXXaws/lLujr2ujEgzRFdt9p/gyGFUgsY8YH2x3pGCEOAu5G62sOGAbM5vK88t8zqfDdNCPweZhVJ0cHNw5vC3Lims1435Q+DXwE+K7yjXFolURnHhs
UUEoUV/u+9kJD0kPUf8vCjB/3IlJNjcrvEuu+NwKVe83Xd9r6ltsqgv614BHA9QOqO9Itnu7PiQPNzWqrV9r269A7rW1rAUmrqu5+w8XzfuT3s6wszZH5xPXovR85dhN0TXjzBo2PIhc38srv5yopvBBpp115NAAAAABJRU5ErkJggg=="; pub type Result = std::result::Result; diff --git a/backends/v3/src/client/mod.rs b/backends/v3/src/client/mod.rs index d4ac50c9c46..86c783ea3a0 100644 --- a/backends/v3/src/client/mod.rs +++ b/backends/v3/src/client/mod.rs @@ -62,6 +62,6 @@ impl From for InputChunk { } } -static WARMUP_IMAGE_BASE64 :&str = "iVBORw0KGgoAAAANSUhEUgAAABQAAAAUCAIAAAAC64paAAABg2lDQ1BJQ0MgcHJvZmlsZQAAKJF9kT1Iw0AcxV/TSotUROxQxCFDdbKLijjWKhShQqgVWnUwufQLmrQkKS6OgmvBwY/FqoOLs64OroIg+AHi7OCk6CIl/i8ptIjx4Lgf7+497t4BQqvKNDOQADTdMjKppJjLr4rBVwQQwhAERGVm1uckKQ3P8XUPH1/v4jzL+9yfY0AtmAzwicQJVjcs4g3imU2rznmfOMLKskp8Tjxh0AWJH7muuPzGueSwwDMjRjYzTxwhFks9rPQwKxsa8TRxTNV0yhdyLquctzhr1Qbr3JO/MFzQV5a5TnMUKSxiCRJEKGiggiosxGnVSTGRof2kh3/E8UvkUshVASPHAmrQIDt+8D/43a1ZnJp0k8JJoO/Ftj/GgOAu0G7a9vexbbdPAP8zcKV3/bUWMPtJerOrxY6AwW3g4rqrKXvA5Q4QfarLhuxIfppCsQi8n9E35YHhW6B/ze2ts4/TByBLXaVvgINDYLxE2ese7w719vbvmU5/PycecohsjayNAAAACXBIWXMAAC4jAAAuIwF4pT92AAAAB3RJTUUH6AQIEQMnlTSSjwAAABl0RVh0Q29tbWVudABDcmVhdGVkIHdpdGggR0lNUFeBDhcAAAASSURBVDjLY2AYBaNgFIyCoQsABMQAAeRw1DoAAAAASUVORK5CYII="; +static WARMUP_IMAGE_BASE64 : &str = "iVBORw0KGgoAAAANSUhEUgAAACgAAAAoCAAAAACpleexAAAGc0lEQVR4nAFoBpf5AINHnT9oHGHwxejPBqNS161/mUe+CNEM2ZjIb1zZ+/ygXl5vkP9T6lgA+jwpw1IgNJCtWJkY7CNQhfuiZBrm+8cUVPW10g4TQIkAS4kJ+6A5qQEfgO5HoId0MVcs4gEcIILe/jMpXbwGCgORcpeGMh0ANY9Kk2EU3Wh6BMR39APvsV0KW8yzEbc5e8JtBWUhs4tHYZ4XhUg5RUVvh9Tl4/FuaOMEgANG4gnASCYBPa1f5p/+uFPqsTWWEqrrQfbI4HsbRHF0smMiTwTKvQJg0zxGRc8muP0ZnqI1cvgtySZP25FY606dWen/hyiwFpcHuy+6xCKCAszDRskWmbjzRyaAb3I5fR+GeB66N5AnNQJ2GCwhYsQJifZBCw7hsxQAcSxA0TMqUCwvxxa+WbWItKBdDUFuBj6TkgcC9letT2IKSPHMr0tLxwFK76OfHulksUmKDPKS+BgDhbKv88I6GqwdMxapuKaqXh04MKP/xuCOAIR2TZzfBY/vtEJO5hSCjtcVqGA+1VQAOocwBA41Vil7/xK32enA6cUCLM7864NqqADVS45Fc6M2/NjrxCcjwL+2tHI3Bs/sOtf3hjETiP0MGQJI/LP+3Cx9SGzwgF/gtNBJLROxfUkaX+Y3AvmMd/IEuQm0EWYv7c8TAGSNTNdgALrJdPUqCo7+B4lGDX+0mfiIVcUS/bQyRvKdBIxydsNe1fEAHhN2FlGUa1YlbdgQeLRW/rjMZc9YqsnGCIZE4krYv80pEVL+bUzMEAQ8HvAODK/TBsK7ba1Sn0HBh6IwLTVEb6idIwi2jKDfpyQx/U+obVDMBFbaVWPV/JkSWtqUqbon7XYOd8wCNJQWDglmxavw51+FRT1u/cwKKHwAM3cdszRLianvQ8QM3Dp+3iEpvT79x6Zcql+SGRHLAd+BUQAnve2eKwRkk7DB5P9iN/LIlXMgxzdcDii2nmYjMhyB3yVbP6bXHEtIo/WYsP+dBEuAtOQBeBgIpwHaFgYwVCEQ6pGU0lZC0iPh86uzzpPWe/I8q2Dc4CEA6Pnz6QnU1ujnzYsuJ0EptCUWTfcSxccu5tCSHH0jTQgrG6/nDlsrMAQCOj1FyhHoXS9GUWxwU/sV8AFHduFF6okKq5vlX4lRvj1+CNMcHOlnAF2Yui1hfSwSXt8MjPstlEj5TPf7li7JBRKvSjDgAaVcm4EFLmxMLmcCTyE/LivPSEOexyzKp8O/MIPLAiQ/4onPEb2s8ESAQBILVhvlqSB1rwCGH1xEePawEple0P9023HYGrvujCDUMlSdgpB/71rfEfTjgY8uHFgPAPdla93mIt7/uWNathN5EUaD8QOzOcQ541UomYbKERPqRjqSBA7PK0sCz8zpLAeMNVI9+Rx/eQeo5A7FeI7wMQuyYCAaxx9u3GfRqMSXUqTwsQAlQ6dAnhSiTQqzy0ivVCrFhIoWciWl/8+6meceIuJ6rvRqJ/GswyeSBP5AMMelxb2MN7XoaXqL9HEvIC1FUhTrHk5xvA1GSOlTSvOHnvxb5c8Behci1Cburu3pF201hAMQNLcjHgmwnnpZFoPvBKpCDLxXVadIjdO4bwLMOtWuExNtJ3x0qJ1yMWIPuZOL20FJNGXcO2f51co5uhjTk52RSDDEAryv3E7i0xHi7eq3Mh/5u1/Vitqez8PoMq3b5/BVpCFvII4YrYqfi5EBaCSvfvWJ4L3E7/h4Fmk+EbAV4ZzqyAZjeNEa+FbDSplFljz49sIczQJBBHrd0OaqYGLf+nKY7SL6EWG1aBBuPayoNRNxflhTdPYLoz4N5EDVAgwNlJXD3/gcnpE9UizPIauZNhP/1rcnE7gNOdwJ4dyZZFQcJSLWH1kC5xOZ9ls7GIUv4BctR2o73VVy0zICLkRku+34Y/6YXywzG2t4adZW5QK7WqesdSGuIXXaws/lLujr2ujEgzRFdt9p/gyGFUgsY8YH2x3pGCEOAu5G62sOGAbM5vK88t8zqfDdNCPweZhVJ0cHNw5vC3Lims1435Q+DXwE+K7yjXFolURnHhsUUEoUV/u+9kJD0kPUf8vCjB/3IlJNjcrvEuu+NwKVe83Xd9r6ltsqgv614BHA9QOqO9Itnu7PiQPNzWqrV9r269A7rW1rAUmrqu5+w8XzfuT3s6wszZH5xPXovR85dhN0TXjzBo2PIhc38srv5yopvBBpp115NAAAAABJRU5ErkJggg=="; pub 
type Result = std::result::Result; diff --git a/integration-tests/models/test_flash_qwen2_vl_warmup.py b/integration-tests/models/test_flash_qwen2_vl_warmup.py new file mode 100644 index 00000000000..74456e48df1 --- /dev/null +++ b/integration-tests/models/test_flash_qwen2_vl_warmup.py @@ -0,0 +1,38 @@ +import pytest + + +@pytest.fixture(scope="module") +def flash_qwen2_vl_handle(launcher): + with launcher( + "Qwen/Qwen2-VL-2B-Instruct", + max_input_tokens=40, + max_batch_prefill_tokens=50, + max_total_tokens=51, + ) as handle: + yield handle + + +@pytest.fixture(scope="module") +async def flash_qwen2(flash_qwen2_vl_handle): + await flash_qwen2_vl_handle.health(300) + return flash_qwen2_vl_handle.client + + +@pytest.mark.private +async def test_flash_qwen2_vl_simple(flash_qwen2, response_snapshot): + response = await flash_qwen2.chat( + max_tokens=20, + seed=42, + messages=[ + { + "role": "user", + "content": [ + {"type": "text", "text": "What is the color of the sky?"}, + ], + }, + ], + ) + + assert response.choices[0].message.content == "The correct answer is: blue" + + assert response == response_snapshot diff --git a/server/text_generation_server/models/custom_modeling/qwen2_vl.py b/server/text_generation_server/models/custom_modeling/qwen2_vl.py index a8e1e8c1593..34ddc5c99fe 100644 --- a/server/text_generation_server/models/custom_modeling/qwen2_vl.py +++ b/server/text_generation_server/models/custom_modeling/qwen2_vl.py @@ -533,6 +533,7 @@ def forward( ).squeeze(0) inputs_embeds[input_ids == self.image_token_id] = image_embeds + max_s = max(max_s, inputs_embeds.size(0)) hidden_states = self.text_model( inputs_embeds=inputs_embeds, position_ids=position_ids, diff --git a/server/text_generation_server/models/vlm_causal_lm.py b/server/text_generation_server/models/vlm_causal_lm.py index db78341d1ed..8f1f874c5b9 100644 --- a/server/text_generation_server/models/vlm_causal_lm.py +++ b/server/text_generation_server/models/vlm_causal_lm.py @@ -229,14 +229,6 @@ def batch_tokenized_inputs( pass elif chunk_type == "image": image = Image.open(BytesIO(chunk.image.data)) - # qwen2_vl expects images to be greater than 20 pixels, this is for warmup since the - # default warmup image is 20x20 - if config.model_type == "qwen2_vl": - if image.width <= 20: - w = image.width * 2 - h = image.height * 2 - image = image.resize((w, h)) - if config.model_type == "llava_next": images.append(image) else: From 45e5c2c266c4837bfde5558a026783df31d6de60 Mon Sep 17 00:00:00 2001 From: drbh Date: Fri, 6 Dec 2024 00:54:20 +0000 Subject: [PATCH 2/5] feat: adjust rotary embed and avoid cuda graphs of size 2 and smaller --- .../test_flash_qwen2_vl_simple.json | 26 +++++++++++++++++++ .../models/test_flash_qwen2_vl_warmup.py | 2 +- .../text_generation_server/models/__init__.py | 6 +++++ .../custom_modeling/flash_qwen2_modeling.py | 7 ++++- 4 files changed, 39 insertions(+), 2 deletions(-) create mode 100644 integration-tests/models/__snapshots__/test_flash_qwen2_vl_warmup/test_flash_qwen2_vl_simple.json diff --git a/integration-tests/models/__snapshots__/test_flash_qwen2_vl_warmup/test_flash_qwen2_vl_simple.json b/integration-tests/models/__snapshots__/test_flash_qwen2_vl_warmup/test_flash_qwen2_vl_simple.json new file mode 100644 index 00000000000..a986510f239 --- /dev/null +++ b/integration-tests/models/__snapshots__/test_flash_qwen2_vl_warmup/test_flash_qwen2_vl_simple.json @@ -0,0 +1,26 @@ +{ + "choices": [ + { + "finish_reason": "stop", + "index": 0, + "logprobs": null, + "message": { + "content": "The correct 
answer is: blue", + "name": null, + "role": "assistant", + "tool_calls": null + }, + "usage": null + } + ], + "created": 1733445131, + "id": "", + "model": "Qwen/Qwen2-VL-2B-Instruct", + "object": "chat.completion", + "system_fingerprint": "2.4.2-dev0-native", + "usage": { + "completion_tokens": 7, + "prompt_tokens": 27, + "total_tokens": 34 + } +} diff --git a/integration-tests/models/test_flash_qwen2_vl_warmup.py b/integration-tests/models/test_flash_qwen2_vl_warmup.py index 74456e48df1..5be87ee21a3 100644 --- a/integration-tests/models/test_flash_qwen2_vl_warmup.py +++ b/integration-tests/models/test_flash_qwen2_vl_warmup.py @@ -5,7 +5,7 @@ def flash_qwen2_vl_handle(launcher): with launcher( "Qwen/Qwen2-VL-2B-Instruct", - max_input_tokens=40, + max_input_length=40, max_batch_prefill_tokens=50, max_total_tokens=51, ) as handle: diff --git a/server/text_generation_server/models/__init__.py b/server/text_generation_server/models/__init__.py index beefeb01672..a0abae0adb4 100644 --- a/server/text_generation_server/models/__init__.py +++ b/server/text_generation_server/models/__init__.py @@ -29,6 +29,7 @@ BloomForCausalLM, ) from text_generation_server.models.globals import ATTENTION +import text_generation_server.models.globals as globals from text_generation_server.models.seq2seq_lm import Seq2SeqLM from text_generation_server.models.galactica import GalacticaCausalLMBatch from text_generation_server.models.custom_modeling.neox_modeling import ( @@ -1217,6 +1218,11 @@ def get_model( else: raise NotImplementedError(FLASH_ATT_ERROR_MESSAGE.format("Idefics")) if model_type == QWEN2_VL: + # TODO: remove edge case when cuda graph issue is resolved for BS=2 with Qwen2-VL + logger.warning( + "Qwen2-VL requires CUDA graph batch sizes greater than 2. Removing all CUDA graphs with a batch size of 2 or less."
+ ) + globals.CUDA_GRAPHS = list(filter(lambda x: x > 2, globals.CUDA_GRAPHS)) return VlmCausalLM( model_id=model_id, model_class=Qwen2VLForConditionalGeneration, diff --git a/server/text_generation_server/models/custom_modeling/flash_qwen2_modeling.py b/server/text_generation_server/models/custom_modeling/flash_qwen2_modeling.py index cc4039b1cbc..01d3bf1a377 100644 --- a/server/text_generation_server/models/custom_modeling/flash_qwen2_modeling.py +++ b/server/text_generation_server/models/custom_modeling/flash_qwen2_modeling.py @@ -138,7 +138,12 @@ def forward( dim=-1, ) - self.rotary_emb(query, torch.select(kv, dim=1, index=0), cos, sin) + self.rotary_emb( + query, + torch.select(kv, dim=1, index=0), + cos[: query.shape[0], ...], + sin[: query.shape[0], ...], + ) if prefill_cache_indices is not None: kv_to_cache = kv[prefill_cache_indices] From 822bd045e5c8e8bcee90e33cf9930816ffe7f3b4 Mon Sep 17 00:00:00 2001 From: drbh Date: Mon, 9 Dec 2024 21:25:13 +0000 Subject: [PATCH 3/5] fix: address image resize and rebase changes --- backends/client/src/lib.rs | 2 +- backends/v2/src/client/mod.rs | 2 +- backends/v3/src/client/mod.rs | 2 +- .../models/flash_causal_lm.py | 18 +++++++++++------- .../models/vlm_causal_lm.py | 7 +++++++ 5 files changed, 21 insertions(+), 10 deletions(-) diff --git a/backends/client/src/lib.rs b/backends/client/src/lib.rs index e33d23480e5..fbe2e7e668a 100644 --- a/backends/client/src/lib.rs +++ b/backends/client/src/lib.rs @@ -86,6 +86,6 @@ impl ChunksToString for Vec { } } -static WARMUP_IMAGE_BASE64 : &str = "iVBORw0KGgoAAAANSUhEUgAAACgAAAAoCAAAAACpleexAAAGc0lEQVR4nAFoBpf5AINHnT9oHGHwxejPBqNS161/mUe+CNEM2ZjIb1zZ+/ygXl5vkP9T6lgA+jwpw1IgNJCtWJkY7CNQhfuiZBrm+8cUVPW10g4TQIkAS4kJ+6A5qQEfgO5HoId0MVcs4gEcIILe/jMpXbwGCgORcpeGMh0ANY9Kk2EU3Wh6BMR39APvsV0KW8yzEbc5e8JtBWUhs4tHYZ4XhUg5RUVvh9Tl4/FuaOMEgANG4gnASCYBPa1f5p/+uFPqsTWWEqrrQfbI4HsbRHF0smMiTwTKvQJg0zxGRc8muP0ZnqI1cvgtySZP25FY606dWen/hyiwFpcHuy+6xCKCAszDRskWmbjzRyaAb3I5fR+GeB66N5AnNQJ2GCwhYsQJifZBCw7hsxQAcSxA0TMqUCwvxxa+WbWItKBdDUFuBj6TkgcC9letT2IKSPHMr0tLxwFK76OfHulksUmKDPKS+BgDhbKv88I6GqwdMxapuKaqXh04MKP/xuCOAIR2TZzfBY/vtEJO5hSCjtcVqGA+1VQAOocwBA41Vil7/xK32enA6cUCLM7864NqqADVS45Fc6M2/NjrxCcjwL+2tHI3Bs/sOtf3hjETiP0MGQJI/LP+3Cx9SGzwgF/gtNBJLROxfUkaX+Y3AvmMd/IEuQm0EWYv7c8TAGSNTNdgALrJdPUqCo7+B4lGDX+0mfiIVcUS/bQyRvKdBIxydsNe1fEAHhN2FlGUa1YlbdgQeLRW/rjMZc9YqsnGCIZE4krYv80pEVL+bUzMEAQ8HvAODK/TBsK7ba1Sn0HBh6IwLTVEb6idIwi2jKDfpyQx/U+obVDMBFbaVWPV/JkSWtqUqbon7XYOd8wCNJQWDglmxavw51+FRT1u/cwKKHwAM3cdszRLianvQ8QM3Dp+3iEpvT79x6Zcql+SGRHLAd+BUQAnve2eKwRkk7DB5P9iN/LIlXMgxzdcDii2nmYjMhyB3yVbP6bXHEtIo/WYsP+dBEuAtOQBeBgIpwHaFgYwVCEQ6pGU0lZC0iPh86uzzpPWe/I8q2Dc4CEA6Pnz6QnU1ujnzYsuJ0EptCUWTfcSxccu5tCSHH0jTQgrG6/nDlsrMAQCOj1FyhHoXS9GUWxwU/sV8AFHduFF6okKq5vlX4lRvj1+CNMcHOlnAF2Yui1hfSwSXt8MjPstlEj5TPf7li7JBRKvSjDgAaVcm4EFLmxMLmcCTyE/LivPSEOexyzKp8O/MIPLAiQ/4onPEb2s8ESAQBILVhvlqSB1rwCGH1xEePawEple0P9023HYGrvujCDUMlSdgpB/71rfEfTjgY8uHFgPAPdla93mIt7/uWNathN5EUaD8QOzOcQ541UomYbKERPqRjqSBA7PK0sCz8zpLAeMNVI9+Rx/eQeo5A7FeI7wMQuyYCAaxx9u3GfRqMSXUqTwsQAlQ6dAnhSiTQqzy0ivVCrFhIoWciWl/8+6meceIuJ6rvRqJ/GswyeSBP5AMMelxb2MN7XoaXqL9HEvIC1FUhTrHk5xvA1GSOlTSvOHnvxb5c8Behci1Cburu3pF201hAMQNLcjHgmwnnpZFoPvBKpCDLxXVadIjdO4bwLMOtWuExNtJ3x0qJ1yMWIPuZOL20FJNGXcO2f51co5uhjTk52RSDDEAryv3E7i0xHi7eq3Mh/5u1/Vitqez8PoMq3b5/BVpCFvII4YrYqfi5EBaCSvfvWJ4L3E7/h4Fmk+EbAV4ZzqyAZjeNEa+FbDSplFljz49sIczQJBBHrd0OaqYGLf+nKY7SL6EWG1aBBuPayoNRNxflhTdPYLoz4N5EDVAgwNlJXD3/gcnpE9UizPIauZNhP/1rcnE7gNOdwJ4dyZZFQcJSLWH1kC5xOZ9ls7GIUv4BctR2o73VVy0zICLkRku+34Y/6YXywzG2t4adZW5QK7WqesdSGuIXXaws/
lLujr2ujEgzRFdt9p/gyGFUgsY8YH2x3pGCEOAu5G62sOGAbM5vK88t8zqfDdNCPweZhVJ0cHNw5vC3Lims1435Q+DXwE+K7yjXFolURnHhsUUEoUV/u+9kJD0kPUf8vCjB/3IlJNjcrvEuu+NwKVe83Xd9r6ltsqgv614BHA9QOqO9Itnu7PiQPNzWqrV9r269A7rW1rAUmrqu5+w8XzfuT3s6wszZH5xPXovR85dhN0TXjzBo2PIhc38srv5yopvBBpp115NAAAAABJRU5ErkJggg=="; +static WARMUP_IMAGE_BASE64: &str = "iVBORw0KGgoAAAANSUhEUgAAABQAAAAUCAIAAAAC64paAAABg2lDQ1BJQ0MgcHJvZmlsZQAAKJF9kT1Iw0AcxV/TSotUROxQxCFDdbKLijjWKhShQqgVWnUwufQLmrQkKS6OgmvBwY/FqoOLs64OroIg+AHi7OCk6CIl/i8ptIjx4Lgf7+497t4BQqvKNDOQADTdMjKppJjLr4rBVwQQwhAERGVm1uckKQ3P8XUPH1/v4jzL+9yfY0AtmAzwicQJVjcs4g3imU2rznmfOMLKskp8Tjxh0AWJH7muuPzGueSwwDMjRjYzTxwhFks9rPQwKxsa8TRxTNV0yhdyLquctzhr1Qbr3JO/MFzQV5a5TnMUKSxiCRJEKGiggiosxGnVSTGRof2kh3/E8UvkUshVASPHAmrQIDt+8D/43a1ZnJp0k8JJoO/Ftj/GgOAu0G7a9vexbbdPAP8zcKV3/bUWMPtJerOrxY6AwW3g4rqrKXvA5Q4QfarLhuxIfppCsQi8n9E35YHhW6B/ze2ts4/TByBLXaVvgINDYLxE2ese7w719vbvmU5/PycecohsjayNAAAACXBIWXMAAC4jAAAuIwF4pT92AAAAB3RJTUUH6AQIEQMnlTSSjwAAABl0RVh0Q29tbWVudABDcmVhdGVkIHdpdGggR0lNUFeBDhcAAAASSURBVDjLY2AYBaNgFIyCoQsABMQAAeRw1DoAAAAASUVORK5CYII="; pub type Result = std::result::Result; diff --git a/backends/v2/src/client/mod.rs b/backends/v2/src/client/mod.rs index b463cc98a26..9fe114a2c87 100644 --- a/backends/v2/src/client/mod.rs +++ b/backends/v2/src/client/mod.rs @@ -63,6 +63,6 @@ impl From for ClientError { } } -static WARMUP_IMAGE_BASE64 : &str = "iVBORw0KGgoAAAANSUhEUgAAACgAAAAoCAAAAACpleexAAAGc0lEQVR4nAFoBpf5AINHnT9oHGHwxejPBqNS161/mUe+CNEM2ZjIb1zZ+/ygXl5vkP9T6lgA+jwpw1IgNJCtWJkY7CNQhfuiZBrm+8cUVPW10g4TQIkAS4kJ+6A5qQEfgO5HoId0MVcs4gEcIILe/jMpXbwGCgORcpeGMh0ANY9Kk2EU3Wh6BMR39APvsV0KW8yzEbc5e8JtBWUhs4tHYZ4XhUg5RUVvh9Tl4/FuaOMEgANG4gnASCYBPa1f5p/+uFPqsTWWEqrrQfbI4HsbRHF0smMiTwTKvQJg0zxGRc8muP0ZnqI1cvgtySZP25FY606dWen/hyiwFpcHuy+6xCKCAszDRskWmbjzRyaAb3I5fR+GeB66N5AnNQJ2GCwhYsQJifZBCw7hsxQAcSxA0TMqUCwvxxa+WbWItKBdDUFuBj6TkgcC9letT2IKSPHMr0tLxwFK76OfHulksUmKDPKS+BgDhbKv88I6GqwdMxapuKaqXh04MKP/xuCOAIR2TZzfBY/vtEJO5hSCjtcVqGA+1VQAOocwBA41Vil7/xK32enA6cUCLM7864NqqADVS45Fc6M2/NjrxCcjwL+2tHI3Bs/sOtf3hjETiP0MGQJI/LP+3Cx9SGzwgF/gtNBJLROxfUkaX+Y3AvmMd/IEuQm0EWYv7c8TAGSNTNdgALrJdPUqCo7+B4lGDX+0mfiIVcUS/bQyRvKdBIxydsNe1fEAHhN2FlGUa1YlbdgQeLRW/rjMZc9YqsnGCIZE4krYv80pEVL+bUzMEAQ8HvAODK/TBsK7ba1Sn0HBh6IwLTVEb6idIwi2jKDfpyQx/U+obVDMBFbaVWPV/JkSWtqUqbon7XYOd8wCNJQWDglmxavw51+FRT1u/cwKKHwAM3cdszRLianvQ8QM3Dp+3iEpvT79x6Zcql+SGRHLAd+BUQAnve2eKwRkk7DB5P9iN/LIlXMgxzdcDii2nmYjMhyB3yVbP6bXHEtIo/WYsP+dBEuAtOQBeBgIpwHaFgYwVCEQ6pGU0lZC0iPh86uzzpPWe/I8q2Dc4CEA6Pnz6QnU1ujnzYsuJ0EptCUWTfcSxccu5tCSHH0jTQgrG6/nDlsrMAQCOj1FyhHoXS9GUWxwU/sV8AFHduFF6okKq5vlX4lRvj1+CNMcHOlnAF2Yui1hfSwSXt8MjPstlEj5TPf7li7JBRKvSjDgAaVcm4EFLmxMLmcCTyE/LivPSEOexyzKp8O/MIPLAiQ/4onPEb2s8ESAQBILVhvlqSB1rwCGH1xEePawEple0P9023HYGrvujCDUMlSdgpB/71rfEfTjgY8uHFgPAPdla93mIt7/uWNathN5EUaD8QOzOcQ541UomYbKERPqRjqSBA7PK0sCz8zpLAeMNVI9+Rx/eQeo5A7FeI7wMQuyYCAaxx9u3GfRqMSXUqTwsQAlQ6dAnhSiTQqzy0ivVCrFhIoWciWl/8+6meceIuJ6rvRqJ/GswyeSBP5AMMelxb2MN7XoaXqL9HEvIC1FUhTrHk5xvA1GSOlTSvOHnvxb5c8Behci1Cburu3pF201hAMQNLcjHgmwnnpZFoPvBKpCDLxXVadIjdO4bwLMOtWuExNtJ3x0qJ1yMWIPuZOL20FJNGXcO2f51co5uhjTk52RSDDEAryv3E7i0xHi7eq3Mh/5u1/Vitqez8PoMq3b5/BVpCFvII4YrYqfi5EBaCSvfvWJ4L3E7/h4Fmk+EbAV4ZzqyAZjeNEa+FbDSplFljz49sIczQJBBHrd0OaqYGLf+nKY7SL6EWG1aBBuPayoNRNxflhTdPYLoz4N5EDVAgwNlJXD3/gcnpE9UizPIauZNhP/1rcnE7gNOdwJ4dyZZFQcJSLWH1kC5xOZ9ls7GIUv4BctR2o73VVy0zICLkRku+34Y/6YXywzG2t4adZW5QK7WqesdSGuIXXaws/lLujr2ujEgzRFdt9p/gyGFUgsY8YH2x3pGCEOAu5G62sOGAbM5vK88t8zqfDdNCPweZhVJ0cHNw5vC3Lims1435Q+DXwE+K7yjXFolURnHhsUUEoUV/u+9kJD0kPUf8vCjB/3IlJNjcrvEuu+NwKVe83Xd9r6ltsqgv614BHA9QOqO9Itnu7PiQP
NzWqrV9r269A7rW1rAUmrqu5+w8XzfuT3s6wszZH5xPXovR85dhN0TXjzBo2PIhc38srv5yopvBBpp115NAAAAABJRU5ErkJggg=="; +static WARMUP_IMAGE_BASE64: &str = "iVBORw0KGgoAAAANSUhEUgAAABQAAAAUCAIAAAAC64paAAABg2lDQ1BJQ0MgcHJvZmlsZQAAKJF9kT1Iw0AcxV/TSotUROxQxCFDdbKLijjWKhShQqgVWnUwufQLmrQkKS6OgmvBwY/FqoOLs64OroIg+AHi7OCk6CIl/i8ptIjx4Lgf7+497t4BQqvKNDOQADTdMjKppJjLr4rBVwQQwhAERGVm1uckKQ3P8XUPH1/v4jzL+9yfY0AtmAzwicQJVjcs4g3imU2rznmfOMLKskp8Tjxh0AWJH7muuPzGueSwwDMjRjYzTxwhFks9rPQwKxsa8TRxTNV0yhdyLquctzhr1Qbr3JO/MFzQV5a5TnMUKSxiCRJEKGiggiosxGnVSTGRof2kh3/E8UvkUshVASPHAmrQIDt+8D/43a1ZnJp0k8JJoO/Ftj/GgOAu0G7a9vexbbdPAP8zcKV3/bUWMPtJerOrxY6AwW3g4rqrKXvA5Q4QfarLhuxIfppCsQi8n9E35YHhW6B/ze2ts4/TByBLXaVvgINDYLxE2ese7w719vbvmU5/PycecohsjayNAAAACXBIWXMAAC4jAAAuIwF4pT92AAAAB3RJTUUH6AQIEQMnlTSSjwAAABl0RVh0Q29tbWVudABDcmVhdGVkIHdpdGggR0lNUFeBDhcAAAASSURBVDjLY2AYBaNgFIyCoQsABMQAAeRw1DoAAAAASUVORK5CYII="; pub type Result = std::result::Result; diff --git a/backends/v3/src/client/mod.rs b/backends/v3/src/client/mod.rs index 86c783ea3a0..ab4311c3b8c 100644 --- a/backends/v3/src/client/mod.rs +++ b/backends/v3/src/client/mod.rs @@ -62,6 +62,6 @@ impl From for InputChunk { } } -static WARMUP_IMAGE_BASE64 : &str = "iVBORw0KGgoAAAANSUhEUgAAACgAAAAoCAAAAACpleexAAAGc0lEQVR4nAFoBpf5AINHnT9oHGHwxejPBqNS161/mUe+CNEM2ZjIb1zZ+/ygXl5vkP9T6lgA+jwpw1IgNJCtWJkY7CNQhfuiZBrm+8cUVPW10g4TQIkAS4kJ+6A5qQEfgO5HoId0MVcs4gEcIILe/jMpXbwGCgORcpeGMh0ANY9Kk2EU3Wh6BMR39APvsV0KW8yzEbc5e8JtBWUhs4tHYZ4XhUg5RUVvh9Tl4/FuaOMEgANG4gnASCYBPa1f5p/+uFPqsTWWEqrrQfbI4HsbRHF0smMiTwTKvQJg0zxGRc8muP0ZnqI1cvgtySZP25FY606dWen/hyiwFpcHuy+6xCKCAszDRskWmbjzRyaAb3I5fR+GeB66N5AnNQJ2GCwhYsQJifZBCw7hsxQAcSxA0TMqUCwvxxa+WbWItKBdDUFuBj6TkgcC9letT2IKSPHMr0tLxwFK76OfHulksUmKDPKS+BgDhbKv88I6GqwdMxapuKaqXh04MKP/xuCOAIR2TZzfBY/vtEJO5hSCjtcVqGA+1VQAOocwBA41Vil7/xK32enA6cUCLM7864NqqADVS45Fc6M2/NjrxCcjwL+2tHI3Bs/sOtf3hjETiP0MGQJI/LP+3Cx9SGzwgF/gtNBJLROxfUkaX+Y3AvmMd/IEuQm0EWYv7c8TAGSNTNdgALrJdPUqCo7+B4lGDX+0mfiIVcUS/bQyRvKdBIxydsNe1fEAHhN2FlGUa1YlbdgQeLRW/rjMZc9YqsnGCIZE4krYv80pEVL+bUzMEAQ8HvAODK/TBsK7ba1Sn0HBh6IwLTVEb6idIwi2jKDfpyQx/U+obVDMBFbaVWPV/JkSWtqUqbon7XYOd8wCNJQWDglmxavw51+FRT1u/cwKKHwAM3cdszRLianvQ8QM3Dp+3iEpvT79x6Zcql+SGRHLAd+BUQAnve2eKwRkk7DB5P9iN/LIlXMgxzdcDii2nmYjMhyB3yVbP6bXHEtIo/WYsP+dBEuAtOQBeBgIpwHaFgYwVCEQ6pGU0lZC0iPh86uzzpPWe/I8q2Dc4CEA6Pnz6QnU1ujnzYsuJ0EptCUWTfcSxccu5tCSHH0jTQgrG6/nDlsrMAQCOj1FyhHoXS9GUWxwU/sV8AFHduFF6okKq5vlX4lRvj1+CNMcHOlnAF2Yui1hfSwSXt8MjPstlEj5TPf7li7JBRKvSjDgAaVcm4EFLmxMLmcCTyE/LivPSEOexyzKp8O/MIPLAiQ/4onPEb2s8ESAQBILVhvlqSB1rwCGH1xEePawEple0P9023HYGrvujCDUMlSdgpB/71rfEfTjgY8uHFgPAPdla93mIt7/uWNathN5EUaD8QOzOcQ541UomYbKERPqRjqSBA7PK0sCz8zpLAeMNVI9+Rx/eQeo5A7FeI7wMQuyYCAaxx9u3GfRqMSXUqTwsQAlQ6dAnhSiTQqzy0ivVCrFhIoWciWl/8+6meceIuJ6rvRqJ/GswyeSBP5AMMelxb2MN7XoaXqL9HEvIC1FUhTrHk5xvA1GSOlTSvOHnvxb5c8Behci1Cburu3pF201hAMQNLcjHgmwnnpZFoPvBKpCDLxXVadIjdO4bwLMOtWuExNtJ3x0qJ1yMWIPuZOL20FJNGXcO2f51co5uhjTk52RSDDEAryv3E7i0xHi7eq3Mh/5u1/Vitqez8PoMq3b5/BVpCFvII4YrYqfi5EBaCSvfvWJ4L3E7/h4Fmk+EbAV4ZzqyAZjeNEa+FbDSplFljz49sIczQJBBHrd0OaqYGLf+nKY7SL6EWG1aBBuPayoNRNxflhTdPYLoz4N5EDVAgwNlJXD3/gcnpE9UizPIauZNhP/1rcnE7gNOdwJ4dyZZFQcJSLWH1kC5xOZ9ls7GIUv4BctR2o73VVy0zICLkRku+34Y/6YXywzG2t4adZW5QK7WqesdSGuIXXaws/lLujr2ujEgzRFdt9p/gyGFUgsY8YH2x3pGCEOAu5G62sOGAbM5vK88t8zqfDdNCPweZhVJ0cHNw5vC3Lims1435Q+DXwE+K7yjXFolURnHhsUUEoUV/u+9kJD0kPUf8vCjB/3IlJNjcrvEuu+NwKVe83Xd9r6ltsqgv614BHA9QOqO9Itnu7PiQPNzWqrV9r269A7rW1rAUmrqu5+w8XzfuT3s6wszZH5xPXovR85dhN0TXjzBo2PIhc38srv5yopvBBpp115NAAAAABJRU5ErkJggg=="; +static WARMUP_IMAGE_BASE64: &str = 
"iVBORw0KGgoAAAANSUhEUgAAABQAAAAUCAIAAAAC64paAAABg2lDQ1BJQ0MgcHJvZmlsZQAAKJF9kT1Iw0AcxV/TSotUROxQxCFDdbKLijjWKhShQqgVWnUwufQLmrQkKS6OgmvBwY/FqoOLs64OroIg+AHi7OCk6CIl/i8ptIjx4Lgf7+497t4BQqvKNDOQADTdMjKppJjLr4rBVwQQwhAERGVm1uckKQ3P8XUPH1/v4jzL+9yfY0AtmAzwicQJVjcs4g3imU2rznmfOMLKskp8Tjxh0AWJH7muuPzGueSwwDMjRjYzTxwhFks9rPQwKxsa8TRxTNV0yhdyLquctzhr1Qbr3JO/MFzQV5a5TnMUKSxiCRJEKGiggiosxGnVSTGRof2kh3/E8UvkUshVASPHAmrQIDt+8D/43a1ZnJp0k8JJoO/Ftj/GgOAu0G7a9vexbbdPAP8zcKV3/bUWMPtJerOrxY6AwW3g4rqrKXvA5Q4QfarLhuxIfppCsQi8n9E35YHhW6B/ze2ts4/TByBLXaVvgINDYLxE2ese7w719vbvmU5/PycecohsjayNAAAACXBIWXMAAC4jAAAuIwF4pT92AAAAB3RJTUUH6AQIEQMnlTSSjwAAABl0RVh0Q29tbWVudABDcmVhdGVkIHdpdGggR0lNUFeBDhcAAAASSURBVDjLY2AYBaNgFIyCoQsABMQAAeRw1DoAAAAASUVORK5CYII="; pub type Result = std::result::Result; diff --git a/server/text_generation_server/models/flash_causal_lm.py b/server/text_generation_server/models/flash_causal_lm.py index d097c54fc2c..f2d27db9181 100644 --- a/server/text_generation_server/models/flash_causal_lm.py +++ b/server/text_generation_server/models/flash_causal_lm.py @@ -56,11 +56,13 @@ MEM_POOL, ATTENTION, BLOCK_SIZE, - CUDA_GRAPHS, REQUEST_LOGPROBS, TGI_WIGGLE_ROOM, get_adapter_to_index, ) + +# avoid coping CUDA_GRAPHS value by importing globals as a module +import text_generation_server.models.globals as globals from text_generation_server.layers.attention import KVCache, Seqlen from text_generation_server.utils import StoppingCriteria, HeterogeneousNextTokenChooser from text_generation_server.utils.dist import MEMORY_FRACTION @@ -1635,8 +1637,8 @@ def warmup( int(val) for val in os.environ["PYTORCH_TUNABLEOP_SEQLENS"].split(",") ] - elif CUDA_GRAPHS is not None: - tuning_sequences = CUDA_GRAPHS + elif globals.CUDA_GRAPHS is not None: + tuning_sequences = globals.CUDA_GRAPHS else: tuning_sequences = [1, 2, 3, 4, 5, 6, 7] @@ -1675,13 +1677,14 @@ def warmup( "PyTorch ROCm TunableOp (https://github.com/pytorch/pytorch/tree/main/aten/src/ATen/cuda/tunable) is disabled. TunableOp brings an additional 5-8% latency improvement for small sequence lengths but requires a warmup. If necessary, please use the environment variable PYTORCH_TUNABLEOP_ENABLED=1 to enable TunableOp.", ) - if CUDA_GRAPHS: + if globals.CUDA_GRAPHS: try: log_master( - logger.info, f"Cuda Graphs are enabled for sizes {CUDA_GRAPHS}" + logger.info, + f"Cuda Graphs are enabled for sizes {globals.CUDA_GRAPHS}", ) # Warmup cuda graphs - for bs in CUDA_GRAPHS: + for bs in globals.CUDA_GRAPHS: synchronize(self.device) free_memory = get_free_memory( self.device, MEMORY_FRACTION * TGI_WIGGLE_ROOM @@ -1705,7 +1708,8 @@ def warmup( logger.exception("Decode cuda graph warmup failed") else: log_master( - logger.info, f"Cuda Graphs are disabled (CUDA_GRAPHS={CUDA_GRAPHS})." 
+ logger.info, + f"Cuda Graphs are disabled (CUDA_GRAPHS={globals.CUDA_GRAPHS}).", ) assert max_input_tokens is not None diff --git a/server/text_generation_server/models/vlm_causal_lm.py b/server/text_generation_server/models/vlm_causal_lm.py index 8f1f874c5b9..4d6ea84e023 100644 --- a/server/text_generation_server/models/vlm_causal_lm.py +++ b/server/text_generation_server/models/vlm_causal_lm.py @@ -229,6 +229,13 @@ def batch_tokenized_inputs( pass elif chunk_type == "image": image = Image.open(BytesIO(chunk.image.data)) + # qwen2_vl expects images to be greater than 20 pixels, this is for warmup since the + # default warmup image is 20x20 + if config.model_type == "qwen2_vl": + if image.width <= 20: + w = image.width * 2 + h = image.height * 2 + image = image.resize((w, h)) if config.model_type == "llava_next": images.append(image) else: From bd59f96135227b020f2f95b72247f8b8dfea7b8a Mon Sep 17 00:00:00 2001 From: drbh Date: Tue, 14 Jan 2025 22:15:04 +0000 Subject: [PATCH 4/5] feat: update to run qwen2-vl tests --- .../models/test_flash_qwen2_vl.py | 161 +++++++++--------- 1 file changed, 80 insertions(+), 81 deletions(-) diff --git a/integration-tests/models/test_flash_qwen2_vl.py b/integration-tests/models/test_flash_qwen2_vl.py index 97a533fc5d4..946ab2f1efb 100644 --- a/integration-tests/models/test_flash_qwen2_vl.py +++ b/integration-tests/models/test_flash_qwen2_vl.py @@ -1,81 +1,80 @@ -# Disabled because it's broken. -# import pytest -# -# -# @pytest.fixture(scope="module") -# def flash_qwen2_vl_handle(launcher): -# with launcher("Qwen/Qwen2-VL-7B-Instruct") as handle: -# yield handle -# -# -# @pytest.fixture(scope="module") -# async def flash_qwen2(flash_qwen2_vl_handle): -# await flash_qwen2_vl_handle.health(300) -# return flash_qwen2_vl_handle.client -# -# -# @pytest.mark.private -# async def test_flash_qwen2_vl_simple(flash_qwen2, response_snapshot): -# response = await flash_qwen2.chat( -# max_tokens=100, -# seed=42, -# messages=[ -# { -# "role": "user", -# "content": [ -# { -# "type": "image_url", -# "image_url": { -# "url": "https://huggingface.co/datasets/huggingface/documentation-images/resolve/main/transformers/rabbit.png" -# }, -# }, -# {"type": "text", "text": "Describe this image."}, -# ], -# }, -# ], -# ) -# -# assert ( -# response.choices[0].message.content -# == "The image depicts an anthropomorphic rabbit, wearing a futuristic spacesuit, in an extraterrestrial environment. The setting appears to be a red planet resembling Mars, with rugged terrain and rocky formations in the background. The moon is visible in the distant sky, adding to the lunar landscape." -# ) -# -# assert response == response_snapshot -# -# -# @pytest.mark.private -# async def test_flash_qwen2_vl_simple_streaming(flash_qwen2, response_snapshot): -# responses = await flash_qwen2.chat( -# max_tokens=100, -# seed=42, -# messages=[ -# { -# "role": "user", -# "content": [ -# { -# "type": "image_url", -# "image_url": { -# "url": "https://huggingface.co/datasets/huggingface/documentation-images/resolve/main/transformers/rabbit.png" -# }, -# }, -# {"type": "text", "text": "Describe this image."}, -# ], -# }, -# ], -# stream=True, -# ) -# -# count = 0 -# generated = "" -# last_response = None -# async for response in responses: -# count += 1 -# generated += response.choices[0].delta.content -# last_response = response -# -# assert ( -# generated -# == "The image depicts an anthropomorphic rabbit, wearing a futuristic spacesuit, in an extraterrestrial environment. 
The setting appears to be a red planet resembling Mars, with rugged terrain and rocky formations in the background. The moon is visible in the distant sky, adding to the lunar landscape." -# ) -# assert count == 58 -# assert last_response == response_snapshot +import pytest + + +@pytest.fixture(scope="module") +def flash_qwen2_vl_handle(launcher): + with launcher("Qwen/Qwen2-VL-7B-Instruct") as handle: + yield handle + + +@pytest.fixture(scope="module") +async def flash_qwen2(flash_qwen2_vl_handle): + await flash_qwen2_vl_handle.health(300) + return flash_qwen2_vl_handle.client + + +@pytest.mark.private +async def test_flash_qwen2_vl_simple(flash_qwen2, response_snapshot): + response = await flash_qwen2.chat( + max_tokens=100, + seed=42, + messages=[ + { + "role": "user", + "content": [ + { + "type": "image_url", + "image_url": { + "url": "https://huggingface.co/datasets/huggingface/documentation-images/resolve/main/transformers/rabbit.png" + }, + }, + {"type": "text", "text": "Describe this image."}, + ], + }, + ], + ) + + assert ( + response.choices[0].message.content + == "The image depicts an anthropomorphic rabbit, wearing a futuristic spacesuit, in an extraterrestrial environment. The setting appears to be a red planet resembling Mars, with rugged terrain and rocky formations in the background. The moon is visible in the distant sky, adding to the lunar landscape." + ) + + assert response == response_snapshot + + +@pytest.mark.private +async def test_flash_qwen2_vl_simple_streaming(flash_qwen2, response_snapshot): + responses = await flash_qwen2.chat( + max_tokens=100, + seed=42, + messages=[ + { + "role": "user", + "content": [ + { + "type": "image_url", + "image_url": { + "url": "https://huggingface.co/datasets/huggingface/documentation-images/resolve/main/transformers/rabbit.png" + }, + }, + {"type": "text", "text": "Describe this image."}, + ], + }, + ], + stream=True, + ) + + count = 0 + generated = "" + last_response = None + async for response in responses: + count += 1 + generated += response.choices[0].delta.content + last_response = response + + assert ( + generated + == "The image depicts an anthropomorphic rabbit, wearing a futuristic spacesuit, in an extraterrestrial environment. The setting appears to be a red planet resembling Mars, with rugged terrain and rocky formations in the background. The moon is visible in the distant sky, adding to the lunar landscape." 
+ ) + assert count == 58 + assert last_response == response_snapshot From 37f92f2c044cdf6c6a6008311059c55e8fcfc533 Mon Sep 17 00:00:00 2001 From: drbh Date: Thu, 16 Jan 2025 17:42:52 +0000 Subject: [PATCH 5/5] fix: tweak param types --- .../text_generation_server/models/custom_modeling/qwen2_vl.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/server/text_generation_server/models/custom_modeling/qwen2_vl.py b/server/text_generation_server/models/custom_modeling/qwen2_vl.py index 34ddc5c99fe..95cf6a318e0 100644 --- a/server/text_generation_server/models/custom_modeling/qwen2_vl.py +++ b/server/text_generation_server/models/custom_modeling/qwen2_vl.py @@ -517,11 +517,11 @@ def forward( pixel_values: torch.FloatTensor = None, image_grid_thw: Optional[torch.LongTensor] = None, video_grid_thw: Optional[torch.LongTensor] = None, - pixel_attention_mask=None, + pixel_attention_mask: Optional[torch.Tensor] = None, image_sizes: Optional[torch.LongTensor] = None, adapter_data: Optional[torch.Tensor] = None, cross_attention_states: Optional[torch.Tensor] = None, - image_indices=None, + image_indices: Optional[torch.Tensor] = None, ): inputs_embeds = self.embed_tokens(input_ids)
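
Reviewer note on the CUDA_GRAPHS handling above: patch 2 filters out CUDA graph batch sizes of 2 or less for Qwen2-VL inside get_model, and patches 2-3 switch the call sites from `from ...models.globals import CUDA_GRAPHS` to importing the module and reading `globals.CUDA_GRAPHS`, so that warmup sees the filtered list rather than a stale copy of the original binding. The sketch below is a minimal, self-contained illustration of that Python import behavior only; the module object and the list of sizes are made up for the example and are not TGI's actual defaults.

```python
import types

# Stand-in for text_generation_server.models.globals (illustrative values only).
globals_mod = types.ModuleType("globals_mod")
globals_mod.CUDA_GRAPHS = [1, 2, 4, 8, 16, 32]

# Equivalent of `from ...globals import CUDA_GRAPHS`:
# this copies the *current* binding into a local name.
CUDA_GRAPHS = globals_mod.CUDA_GRAPHS

# Equivalent of the Qwen2-VL edge case in get_model():
# rebind the module attribute, dropping batch sizes of 2 or less.
globals_mod.CUDA_GRAPHS = list(filter(lambda bs: bs > 2, globals_mod.CUDA_GRAPHS))

# A consumer holding the `from`-import copy still sees the old list...
print(CUDA_GRAPHS)              # [1, 2, 4, 8, 16, 32]
# ...while a consumer reading through the module sees the filtered one.
print(globals_mod.CUDA_GRAPHS)  # [4, 8, 16, 32]
```

This is the behavior the patch-3 comment about importing globals as a module (rather than copying CUDA_GRAPHS) relies on: warmup in flash_causal_lm.py reads the attribute through the module at call time, so the Qwen2-VL filtering applied in get_model takes effect.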