net/SocketDescriptor: add SetTcpUserTimeout()

commit 20b6e0d684
parent 713c1f2ba9
Author: Max Kellermann
Date:   2019-02-27 23:22:12 +01:00

2 changed files with 15 additions and 2 deletions


@@ -1,5 +1,5 @@
 /*
- * Copyright (C) 2012-2017 Max Kellermann <max.kellermann@gmail.com>
+ * Copyright 2012-2019 Max Kellermann <max.kellermann@gmail.com>
  *
  * Redistribution and use in source and binary forms, with or without
  * modification, are permitted provided that the following conditions
@@ -315,6 +315,13 @@ SocketDescriptor::SetTcpDeferAccept(const int &seconds) noexcept
 	return SetOption(IPPROTO_TCP, TCP_DEFER_ACCEPT, &seconds, sizeof(seconds));
 }
 
+bool
+SocketDescriptor::SetTcpUserTimeout(const unsigned &milliseconds) noexcept
+{
+	return SetOption(IPPROTO_TCP, TCP_USER_TIMEOUT,
+			 &milliseconds, sizeof(milliseconds));
+}
+
 bool
 SocketDescriptor::SetV6Only(bool value) noexcept
 {
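
For reference, a minimal stand-alone sketch of what the new method presumably reduces to on Linux, assuming SetOption() is a thin wrapper around setsockopt() (the wrapper itself is not part of this diff); the helper name is hypothetical:

    // Hypothetical free-standing equivalent of SetTcpUserTimeout(); not MPD code.
    #include <netinet/in.h>
    #include <netinet/tcp.h>
    #include <sys/socket.h>

    static bool
    SetTcpUserTimeoutRaw(int fd, unsigned milliseconds) noexcept
    {
        /* TCP_USER_TIMEOUT (Linux >= 2.6.37): the maximum time transmitted
           data may remain unacknowledged before the kernel drops the
           connection; 0 restores the default behaviour */
        return setsockopt(fd, IPPROTO_TCP, TCP_USER_TIMEOUT,
                          &milliseconds, sizeof(milliseconds)) == 0;
    }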


@@ -1,5 +1,5 @@
 /*
- * Copyright (C) 2012-2017 Max Kellermann <max.kellermann@gmail.com>
+ * Copyright 2012-2019 Max Kellermann <max.kellermann@gmail.com>
  *
  * Redistribution and use in source and binary forms, with or without
  * modification, are permitted provided that the following conditions
@@ -188,6 +188,12 @@ public:
 	bool SetCork(bool value=true) noexcept;
 
 	bool SetTcpDeferAccept(const int &seconds) noexcept;
 
+	/**
+	 * Setter for TCP_USER_TIMEOUT.
+	 */
+	bool SetTcpUserTimeout(const unsigned &milliseconds) noexcept;
+
 	bool SetV6Only(bool value) noexcept;
 
 	/**
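
And a hedged sketch of the caller's side; the include path, the by-value SocketDescriptor parameter and the helper name are assumptions based on the commit title, not shown in this diff:

    // Hypothetical caller: abort the connection if the peer leaves
    // transmitted data unacknowledged for more than 30 seconds.
    #include "net/SocketDescriptor.hxx"  // assumed path

    static bool
    ApplyUserTimeout(SocketDescriptor s) noexcept
    {
        /* 30 s in milliseconds; returns false if the option could not be
           set, e.g. on platforms without TCP_USER_TIMEOUT */
        return s.SetTcpUserTimeout(30000);
    }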