From d20aeefef2abc422a5ab6fc3ad229f49a9a437c3 Mon Sep 17 00:00:00 2001
From: Justin Clark-Casey (justincc)
Date: Tue, 7 Oct 2014 01:08:22 +0100
Subject: Fix console set and get of max scene rate. Was performing the wrong
 calculation: throttle buckets are set in bytes, not bits
---
OpenSim/Region/ClientStack/Linden/UDP/LLUDPServerCommands.cs | 6 +++---
1 file changed, 3 insertions(+), 3 deletions(-)
diff --git a/OpenSim/Region/ClientStack/Linden/UDP/LLUDPServerCommands.cs b/OpenSim/Region/ClientStack/Linden/UDP/LLUDPServerCommands.cs
index 0d49879..20ae188 100644
--- a/OpenSim/Region/ClientStack/Linden/UDP/LLUDPServerCommands.cs
+++ b/OpenSim/Region/ClientStack/Linden/UDP/LLUDPServerCommands.cs
@@ -159,7 +159,7 @@ namespace OpenSim.Region.ClientStack.LindenUDP
"debug lludp set",
"debug lludp set ",
"Set a parameter for the server.",
- "Only current setting is 'scene-throttle-max' which sets the current max cumulative kbit/s provided for this scene to clients",
+ "Only current setting is 'scene-throttle-max' which sets the current max cumulative kbps provided for this scene to clients",
HandleSetCommand);

m_console.Commands.AddCommand(
@@ -181,7 +181,7 @@ namespace OpenSim.Region.ClientStack.LindenUDP
cdl.AddRow("Adaptive throttles", m_udpServer.ThrottleRates.AdaptiveThrottlesEnabled);
cdl.AddRow(
"Max scene throttle",
- m_udpServer.MaxTotalDripRate != 0 ? string.Format("{0} kbit", m_udpServer.MaxTotalDripRate / 8 / 1000) : "unset");
+ m_udpServer.MaxTotalDripRate != 0 ? string.Format("{0} kbps", m_udpServer.MaxTotalDripRate * 8 / 1000) : "unset");

m_console.Output(cdl.ToString());
@@ -418,7 +418,7 @@ namespace OpenSim.Region.ClientStack.LindenUDP
if (!ConsoleUtil.TryParseConsoleInt(MainConsole.Instance, rawValue, out newValue))
return;

- m_udpServer.Throttle.RequestedDripRate = newValue * 8 * 1000;
+ m_udpServer.Throttle.RequestedDripRate = newValue * 1000 / 8;
}

m_console.OutputFormat("{0} set to {1} in {2}", param, rawValue, m_udpServer.Scene.Name);
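
The conversion at issue: the console command works in kilobits per second
(kbps), while the LLUDP token-bucket drip rates are stored in bytes per
second. Below is a minimal C# sketch of both directions of the corrected
arithmetic; the helper names are illustrative, not OpenSim API.

    using System;

    // Illustrative helpers mirroring the corrected arithmetic in this patch;
    // these names are hypothetical and do not exist in OpenSim.
    static class ThrottleUnits
    {
        // Console input is kbps; the drip rate is bytes/s.
        // kbps -> bits/s (* 1000) -> bytes/s (/ 8), as in "newValue * 1000 / 8".
        public static long KbpsToBytesPerSec(long kbps) => kbps * 1000 / 8;

        // Display path: bytes/s -> bits/s (* 8) -> kbps (/ 1000),
        // as in "MaxTotalDripRate * 8 / 1000".
        public static long BytesPerSecToKbps(long bytesPerSec) => bytesPerSec * 8 / 1000;
    }

    class Demo
    {
        static void Main()
        {
            long kbps = 1000; // e.g. "debug lludp set scene-throttle-max 1000"
            long dripRate = ThrottleUnits.KbpsToBytesPerSec(kbps);
            Console.WriteLine("{0} kbps -> {1} bytes/s", kbps, dripRate);      // 125000 bytes/s
            Console.WriteLine("{0} kbps read back",
                ThrottleUnits.BytesPerSecToKbps(dripRate));                    // 1000 kbps
        }
    }

With the old arithmetic, entering 1000 stored 8,000,000 bytes/s, i.e. a
64 Mbps cap rather than the intended 1 Mbps; the corrected conversions
store 125,000 bytes/s and round-trip exactly.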